Esempio n. 1
0
    def compress_and_check_dir(self, extension):
        """
        Compress a directory tree of random files, uncompress the archive
        and check that every file hash survives the round trip.

        :param extension: archive extension (e.g. '.zip', '.tar.bz2')
                          appended to the compress dir to name the archive.
        """
        hash_map_1 = {}
        # range() instead of xrange() so this also runs on Python 3
        # (xrange was removed); the iteration count is small anyway
        for i in range(self.sys_random.randint(10, 20)):
            # alternate between a fresh subdirectory and the top level so
            # the archive contains both nested and flat entries
            if i % 2 == 0:
                compressdir = tempfile.mkdtemp(dir=self.compressdir)
            else:
                compressdir = self.compressdir
            str_length = self.sys_random.randint(30, 50)
            fd, filename = tempfile.mkstemp(dir=compressdir, text=True)
            with os.fdopen(fd, 'w') as f:
                f.write(data_factory.generate_random_string(str_length))
            # key by path relative to the compress dir so the two hash
            # maps are comparable after extraction
            relative_path = filename.replace(self.compressdir, '')
            hash_map_1[relative_path] = crypto.hash_file(filename)

        archive_filename = self.compressdir + extension
        archive.compress(archive_filename, self.compressdir)
        archive.uncompress(archive_filename, self.decompressdir)

        hash_map_2 = {}
        for root, _, files in os.walk(self.decompressdir):
            for name in files:
                file_path = os.path.join(root, name)
                relative_path = file_path.replace(self.decompressdir, '')
                hash_map_2[relative_path] = crypto.hash_file(file_path)

        self.assertEqual(hash_map_1, hash_map_2)
Esempio n. 2
0
    def run(self, params_list):
        """
        Run one or more tests remotely and report with test result.

        :param params_list: a list of param dicts; each dict must contain
                            an 'id' key identifying a test to run.

        :return: a list of tagged names of tests that did not pass.
        """
        failures = []
        urls = [x['id'] for x in params_list]
        self.result.urls = urls
        self.result.setup()
        # run_test executes the whole batch on the remote side and returns
        # a dict with at least 'debuglog' and 'tests' keys (used below)
        results = self.run_test(' '.join(urls))
        remote_log_dir = os.path.dirname(results['debuglog'])
        self.result.start_tests()
        for tst in results['tests']:
            test = Test(name=tst['test'],
                        time=tst['time'],
                        status=tst['status'])
            state = test.get_state()
            self.result.start_test(state)
            self.result.check_test(state)
            # status.mapping is presumably truthy for passing statuses --
            # anything else is recorded as a failure
            if not status.mapping[state['status']]:
                failures.append(state['tagged_name'])
        self.result.end_tests()
        local_log_dir = os.path.dirname(self.result.stream.debuglog)
        # pull the zipped remote logs, expand them into the local log
        # directory, then drop the transfer artifact
        zip_filename = remote_log_dir + '.zip'
        zip_path_filename = os.path.join(local_log_dir, os.path.basename(zip_filename))
        self.result.vm.remote.receive_files(local_log_dir, zip_filename)
        archive.uncompress(zip_path_filename, local_log_dir)
        os.remove(zip_path_filename)
        self.result.tear_down()
        return failures
Esempio n. 3
0
    def compress_and_check_dir(self, extension):
        """
        Compress a directory tree of random files, uncompress the archive
        and check that every file hash survives the round trip.

        :param extension: archive extension (e.g. '.zip', '.tar.bz2')
                          appended to the compress dir to name the archive.
        """
        hash_map_1 = {}
        # range() instead of xrange() so this also runs on Python 3
        for i in range(self.sys_random.randint(10, 20)):
            if i % 2 == 0:
                compressdir = tempfile.mkdtemp(dir=self.compressdir)
            else:
                compressdir = self.compressdir
            str_length = self.sys_random.randint(30, 50)
            fd, filename = tempfile.mkstemp(dir=compressdir, text=True)
            # os.fdopen() guarantees the descriptor is closed after the
            # write (the bare os.write() call leaked it) and accepts str
            # on both Python 2 and 3 (os.write needs bytes on Python 3)
            with os.fdopen(fd, 'w') as f:
                f.write(data_factory.generate_random_string(str_length))
            relative_path = filename.replace(self.compressdir, '')
            hash_map_1[relative_path] = crypto.hash_file(filename)

        archive_filename = self.compressdir + extension
        archive.compress(archive_filename, self.compressdir)
        archive.uncompress(archive_filename, self.decompressdir)

        hash_map_2 = {}
        for root, _, files in os.walk(self.decompressdir):
            for name in files:
                file_path = os.path.join(root, name)
                relative_path = file_path.replace(self.decompressdir, '')
                hash_map_2[relative_path] = crypto.hash_file(file_path)

        self.assertEqual(hash_map_1, hash_map_2)
Esempio n. 4
0
 def compress_and_check_file(self, extension):
     """
     Compress a single random file, uncompress the archive and verify
     the content hash is unchanged.

     :param extension: archive extension appended to the source file
                       name (e.g. '.gz', '.xz').
     """
     str_length = self.sys_random.randint(30, 50)
     fd, filename = tempfile.mkstemp(dir=self.basedir, text=True)
     # os.fdopen() guarantees the descriptor is closed after writing
     # (the bare os.write() call leaked it) and accepts str on both
     # Python 2 and 3
     with os.fdopen(fd, 'w') as f:
         f.write(data_factory.generate_random_string(str_length))
     original_hash = crypto.hash_file(filename)
     dstfile = filename + extension
     archive_filename = os.path.join(self.basedir, dstfile)
     archive.compress(archive_filename, filename)
     archive.uncompress(archive_filename, self.decompressdir)
     decompress_file = os.path.join(self.decompressdir,
                                    os.path.basename(filename))
     decompress_hash = crypto.hash_file(decompress_file)
     self.assertEqual(original_hash, decompress_hash)
Esempio n. 5
0
 def compress_and_check_file(self, extension):
     """
     Round-trip a single random file through compress/uncompress and
     assert the content hash is preserved.

     :param extension: archive extension appended to the source file
                       name (e.g. '.gz', '.xz').
     """
     str_length = self.sys_random.randint(30, 50)
     fd, filename = tempfile.mkstemp(dir=self.basedir, text=True)
     # Wrap the descriptor so it is reliably closed (plain os.write()
     # left it open) and so str data also works on Python 3
     with os.fdopen(fd, 'w') as f:
         f.write(data_factory.generate_random_string(str_length))
     original_hash = crypto.hash_file(filename)
     dstfile = filename + extension
     archive_filename = os.path.join(self.basedir, dstfile)
     archive.compress(archive_filename, filename)
     archive.uncompress(archive_filename, self.decompressdir)
     decompress_file = os.path.join(self.decompressdir,
                                    os.path.basename(filename))
     decompress_hash = crypto.hash_file(decompress_file)
     self.assertEqual(original_hash, decompress_hash)
Esempio n. 6
0
    def test_zip_extra_attrs(self):
        """
        Check that utils.archive reflects extra attrs of file like symlinks
        and file permissions.
        """
        def get_path(*args):
            """Get path with decompressdir prefix"""
            return os.path.join(self.decompressdir, *args)

        # File types
        zip_path = os.path.abspath(
            os.path.join(
                os.path.dirname(__file__),
                os.path.pardir,
                os.path.pardir,
                ".data",
                "test_archive__symlinks.zip",
            ))
        # TODO: Handle permission correctly for all users
        # The umask is not yet handled by utils.archive, hardcode it for now
        # NOTE(review): os.umask(2) changes process-wide state and is never
        # restored -- consider saving and restoring the previous mask.
        os.umask(2)
        archive.uncompress(zip_path, self.decompressdir)
        self.assertTrue(os.path.islink(get_path("link_to_dir")))
        self.assertTrue(os.path.islink(get_path("link_to_file")))
        self.assertTrue(os.path.islink(get_path("link_to_file2")))
        # (a second, byte-identical check of ("dir", "2nd_link_to_file")
        # was removed here as redundant)
        self.assertTrue(os.path.islink(get_path("dir", "2nd_link_to_file")))
        self.assertTrue(
            os.path.islink(get_path("dir", "link_to_link_to_file2")))
        self.assertTrue(
            os.path.islink(get_path("link_to_dir", "2nd_link_to_file")))
        self.assertTrue(os.path.isfile(get_path("file")))
        self.assertTrue(os.path.isfile(get_path("dir", "file2")))
        self.assertTrue(os.path.isfile(get_path("link_to_dir", "file2")))
        act = os.path.realpath(get_path("link_to_dir",
                                        "link_to_link_to_file2"))
        exp = get_path("dir", "file2")
        self.assertEqual(act, exp)
        self.assertEqual(os.path.realpath(get_path("link_to_dir")),
                         get_path("dir"))
        # File permissions
        self.assertEqual(
            os.stat(get_path("dir", "file2")).st_mode & 0o777, 0o664)
        self.assertEqual(os.stat(get_path("file")).st_mode & 0o777, 0o753)
        self.assertEqual(os.stat(get_path("dir")).st_mode & 0o777, 0o775)
        self.assertEqual(
            os.stat(get_path("link_to_file2")).st_mode & 0o777, 0o664)
        self.assertEqual(
            os.stat(get_path("link_to_dir")).st_mode & 0o777, 0o775)
        self.assertEqual(
            os.stat(get_path("link_to_file")).st_mode & 0o777, 0o753)
Esempio n. 7
0
 def test_empty_tbz2(self):
     """Uncompressing an empty tar.bz2 should yield None (nothing extracted)."""
     ret = archive.uncompress(
         os.path.join(BASEDIR, "selftests", ".data", "empty.tar.bz2"),
         self.decompressdir,
     )
     # assertIsNone gives a clearer failure message than
     # assertEqual(ret, None) and is the idiomatic None check
     self.assertIsNone(ret, (f"Empty archive should return None "
                             f"({ret})"))
Esempio n. 8
0
    def download(self):
        """
        Fetch the base image asset into the cache and prepare it for use.

        If the fetched asset is an archive it is uncompressed next to it.

        :return: path to the ready-to-use base image.
        """
        image_metadata = {
            "type": "vmimage",
            "name": self.name,
            "version": self.version,
            "arch": self.arch,
            "build": self.build,
        }
        # Normalize cache_dir: a single string becomes a one-element list
        if isinstance(self.cache_dir, str):
            dirs = [self.cache_dir]
        else:
            dirs = self.cache_dir
        fetcher = asset.Asset(name=self.url,
                              asset_hash=self.checksum,
                              algorithm=self.algorithm,
                              locations=None,
                              cache_dirs=dirs,
                              expire=None,
                              metadata=image_metadata)
        asset_path = fetcher.fetch()

        if archive.is_archive(asset_path):
            # extract into a directory named after the asset (extension
            # stripped) and use the extracted content as the image
            destination = os.path.splitext(asset_path)[0]
            asset_path = archive.uncompress(asset_path, destination)
        self._base_image = asset_path
        return self._base_image
Esempio n. 9
0
 def test_zip_extra_attrs(self):
     """
     Check that utils.archive reflects extra attrs of file like symlinks
     and file permissions.
     """
     def get_path(*args):
         """ Get path with decompressdir prefix """
         return os.path.join(self.decompressdir, *args)
     # File types
     zip_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                             os.path.pardir, ".data",
                                             "test_archive__symlinks.zip"))
     # TODO: Handle permission correctly for all users
     # The umask is not yet handled by utils.archive, hardcode it for now
     # NOTE(review): os.umask(2) mutates process-wide state and is not
     # restored afterwards.
     os.umask(2)
     archive.uncompress(zip_path, self.decompressdir)
     self.assertTrue(os.path.islink(get_path("link_to_dir")))
     self.assertTrue(os.path.islink(get_path("link_to_file")))
     self.assertTrue(os.path.islink(get_path("link_to_file2")))
     # (a duplicated check of ("dir", "2nd_link_to_file") was removed
     # here as redundant)
     self.assertTrue(os.path.islink(get_path("dir", "2nd_link_to_file")))
     self.assertTrue(os.path.islink(get_path("dir",
                                             "link_to_link_to_file2")))
     self.assertTrue(os.path.islink(get_path("link_to_dir",
                                             "2nd_link_to_file")))
     self.assertTrue(os.path.isfile(get_path("file")))
     self.assertTrue(os.path.isfile(get_path("dir", "file2")))
     self.assertTrue(os.path.isfile(get_path("link_to_dir", "file2")))
     act = os.path.realpath(get_path("link_to_dir",
                                     "link_to_link_to_file2"))
     exp = get_path("dir", "file2")
     self.assertEqual(act, exp)
     self.assertEqual(os.path.realpath(get_path("link_to_dir")),
                      get_path("dir"))
     # File permissions
     self.assertEqual(os.stat(get_path("dir", "file2")).st_mode & 0o777,
                      0o664)
     self.assertEqual(os.stat(get_path("file")).st_mode & 0o777, 0o753)
     self.assertEqual(os.stat(get_path("dir")).st_mode & 0o777, 0o775)
     self.assertEqual(os.stat(get_path("link_to_file2")).st_mode & 0o777,
                      0o664)
     self.assertEqual(os.stat(get_path("link_to_dir")).st_mode & 0o777,
                      0o775)
     self.assertEqual(os.stat(get_path("link_to_file")).st_mode & 0o777,
                      0o753)
Esempio n. 10
0
    def run_suite(self, test_suite, mux, timeout):
        """
        Run one or more tests and report with test result.

        :param test_suite: a list of tests to run (unused here; the
                           remote runner relies on self.result.urls).
        :param mux: a multiplex iterator (unused here).
        :param timeout: maximum run time in seconds; a falsy value means
                        no timeout.

        :return: a list of test failures.
        """
        del test_suite     # using self.result.urls instead
        del mux            # we're not using multiplexation here
        if not timeout:     # avoid timeout = 0
            timeout = None
        failures = []
        self.result.setup()
        results = self.run_test(self.result.urls, timeout)
        # the remote job's log directory is derived from its debug log path
        remote_log_dir = os.path.dirname(results['debuglog'])
        self.result.start_tests()
        for tst in results['tests']:
            test = RemoteTest(name=tst['test'],
                              time=tst['time'],
                              start=tst['start'],
                              end=tst['end'],
                              status=tst['status'],
                              logdir=tst['logdir'],
                              logfile=tst['logfile'],
                              fail_reason=tst['fail_reason'])
            state = test.get_state()
            self.result.start_test(state)
            self.result.check_test(state)
            # status.mapping is presumably truthy for passing statuses --
            # anything else is recorded as a failure
            if not status.mapping[state['status']]:
                failures.append(state['tagged_name'])
        local_log_dir = os.path.dirname(self.result.stream.debuglog)
        # pull the zipped remote logs, expand them locally, then remove
        # the transfer artifact
        zip_filename = remote_log_dir + '.zip'
        zip_path_filename = os.path.join(local_log_dir,
                                         os.path.basename(zip_filename))
        self.result.remote.receive_files(local_log_dir, zip_filename)
        archive.uncompress(zip_path_filename, local_log_dir)
        os.remove(zip_path_filename)
        self.result.end_tests()
        self.result.tear_down()
        return failures
Esempio n. 11
0
    def run_suite(self, test_suite, mux, timeout):
        """
        Run one or more tests and report with test result.

        :param test_suite: a list of tests to run (ignored; the remote
                           runner uses self.result.urls instead).
        :param mux: a multiplex iterator (unused here).
        :param timeout: maximum run time in seconds; falsy disables the
                        timeout.

        :return: a list of test failures.
        """
        del test_suite  # using self.result.urls instead
        del mux  # we're not using multiplexation here
        if not timeout:  # avoid timeout = 0
            timeout = None
        failures = []
        self.result.setup()
        results = self.run_test(self.result.urls, timeout)
        # remote log dir is the directory holding the remote debug log
        remote_log_dir = os.path.dirname(results['debuglog'])
        self.result.start_tests()
        for tst in results['tests']:
            # rebuild a local RemoteTest view of each remote test record
            test = RemoteTest(name=tst['test'],
                              time=tst['time'],
                              start=tst['start'],
                              end=tst['end'],
                              status=tst['status'],
                              logdir=tst['logdir'],
                              logfile=tst['logfile'],
                              fail_reason=tst['fail_reason'])
            state = test.get_state()
            self.result.start_test(state)
            self.result.check_test(state)
            # non-passing statuses (per status.mapping) count as failures
            if not status.mapping[state['status']]:
                failures.append(state['tagged_name'])
        local_log_dir = os.path.dirname(self.result.stream.debuglog)
        # transfer the zipped remote logs, unpack them into the local log
        # directory and delete the zip afterwards
        zip_filename = remote_log_dir + '.zip'
        zip_path_filename = os.path.join(local_log_dir,
                                         os.path.basename(zip_filename))
        self.result.remote.receive_files(local_log_dir, zip_filename)
        archive.uncompress(zip_path_filename, local_log_dir)
        os.remove(zip_path_filename)
        self.result.end_tests()
        self.result.tear_down()
        return failures
Esempio n. 12
0
    def test_alpha_clipper(self):
        """
        :avocado: tags=arch:alpha
        :avocado: tags=machine:clipper
        """
        kernel_url = ('http://archive.debian.org/debian/dists/lenny/main/'
                      'installer-alpha/current/images/cdrom/vmlinuz')
        kernel_hash = '3a943149335529e2ed3e74d0d787b85fb5671ba3'
        compressed_kernel = self.fetch_asset(kernel_url,
                                             asset_hash=kernel_hash)

        # the Debian installer kernel is compressed; extract it first
        kernel = archive.uncompress(compressed_kernel, self.workdir)

        self.vm.set_console()
        cmdline = self.KERNEL_COMMON_COMMAND_LINE + 'console=ttyS0'
        self.vm.add_args('-nodefaults',
                         '-kernel', kernel,
                         '-append', cmdline)
        self.vm.launch()
        # the kernel echoes its command line early in the boot log
        self.wait_for_console_pattern('Kernel command line: %s' % cmdline)
Esempio n. 13
0
    def test_alpha_clipper(self):
        """
        :avocado: tags=arch:alpha
        :avocado: tags=machine:clipper
        """
        kernel_url = ('http://archive.debian.org/debian/dists/lenny/main/'
                      'installer-alpha/20090123lenny10/images/cdrom/vmlinuz')
        kernel_hash = '3a943149335529e2ed3e74d0d787b85fb5671ba3'
        compressed_kernel = self.fetch_asset(kernel_url,
                                             asset_hash=kernel_hash)

        # the Debian installer kernel is compressed; extract it first
        kernel = archive.uncompress(compressed_kernel, self.workdir)

        cmdline = self.KERNEL_COMMON_COMMAND_LINE + 'console=ttyS0'
        # boot under record/replay, waiting for the kernel to echo its
        # command line on the console
        self.run_rr(kernel,
                    cmdline,
                    'Kernel command line: %s' % cmdline,
                    shift=9,
                    args=('-nodefaults', ))
Esempio n. 14
0
    def test_uboot(self):
        """
        U-Boot and checks that the console is operational.

        :avocado: tags=arch:rx
        :avocado: tags=machine:gdbsim-r5f562n8
        :avocado: tags=endian:little
        """
        uboot_url = ('https://acc.dl.osdn.jp/users/23/23888/u-boot.bin.gz')
        uboot_hash = '9b78dbd43b40b2526848c0b1ce9de02c24f4dcdb'
        uboot_path = self.fetch_asset(uboot_url, asset_hash=uboot_hash)
        # the asset is gzip-compressed; uncompress() returns the path of
        # the extracted binary inside the work directory
        uboot_path = archive.uncompress(uboot_path, self.workdir)

        self.vm.set_console()
        self.vm.add_args('-bios', uboot_path, '-no-reboot')
        self.vm.launch()
        uboot_version = 'U-Boot 2016.05-rc3-23705-ga1ef3c71cb-dirty'
        wait_for_console_pattern(self, uboot_version)
        # NOTE(review): gcc_version is assigned but never used in the code
        # visible here -- presumably a later wait_for_console_pattern()
        # call was meant to consume it; confirm against the full test
        gcc_version = 'rx-unknown-linux-gcc (GCC) 9.0.0 20181105 (experimental)'
Esempio n. 15
0
    def test_alpha_clipper(self):
        """
        :avocado: tags=arch:alpha
        :avocado: tags=machine:clipper
        """
        kernel_url = ('http://archive.debian.org/debian/dists/lenny/main/'
                      'installer-alpha/current/images/cdrom/vmlinuz')
        kernel_hash = '3a943149335529e2ed3e74d0d787b85fb5671ba3'
        compressed_kernel = self.fetch_asset(kernel_url,
                                             asset_hash=kernel_hash)

        # the Debian installer kernel is compressed; extract it first
        kernel = archive.uncompress(compressed_kernel, self.workdir)

        self.vm.set_machine('clipper')
        self.vm.set_console()
        cmdline = self.KERNEL_COMMON_COMMAND_LINE + 'console=ttyS0'
        self.vm.add_args('-vga', 'std',
                         '-kernel', kernel,
                         '-append', cmdline)
        self.vm.launch()
        # the kernel echoes its command line early in the boot log
        self.wait_for_console_pattern('Kernel command line: %s' % cmdline)
Esempio n. 16
0
    def run_suite(self,
                  test_suite,
                  variants,
                  timeout=0,
                  replay_map=None,
                  suite_order="variants-per-test"):
        """
        Run one or more tests and report with test result.

        :param test_suite: a list of tests to run (ignored; the remote
                           runner uses self.job.references instead).
        :param variants: a varianter iterator (unused here).
        :param timeout: maximum run time in seconds; falsy disables the
                        timeout.
        :param replay_map: optional replay map (unused here).
        :param suite_order: only "variants-per-test" (or None) is
                            supported for remote execution.

        :return: a set with types of test failures.
        """
        del test_suite  # using self.job.references instead
        del variants  # we're not using multiplexation here
        if suite_order != "variants-per-test" and suite_order is not None:
            raise exceptions.JobError("execution-order %s is not supported "
                                      "for remote execution." % suite_order)
        del suite_order  # suite_order is ignored for now
        if not timeout:  # avoid timeout = 0
            timeout = None
        summary = set()

        # Divert fabric/paramiko logging into a dedicated remote.log file
        # and redirect stdout/stderr into the job log while tests run;
        # both are restored in the finally block below
        stdout_backup = sys.stdout
        stderr_backup = sys.stderr
        fabric_debugfile = os.path.join(self.job.logdir, 'remote.log')
        paramiko_logger = logging.getLogger('paramiko')
        fabric_logger = logging.getLogger('avocado.fabric')
        remote_logger = logging.getLogger('avocado.remote')
        # NOTE(review): app_logger is created but never used below
        app_logger = logging.getLogger('avocado.debug')
        fmt = ('%(asctime)s %(module)-10.10s L%(lineno)-.4d %('
               'levelname)-5.5s| %(message)s')
        formatter = logging.Formatter(fmt=fmt, datefmt='%H:%M:%S')
        file_handler = logging.FileHandler(filename=fabric_debugfile)
        file_handler.setFormatter(formatter)
        fabric_logger.addHandler(file_handler)
        paramiko_logger.addHandler(file_handler)
        remote_logger.addHandler(file_handler)
        if self.job.args.show_job_log:
            output.add_log_handler(paramiko_logger.name)
        logger_list = [output.LOG_JOB]
        sys.stdout = output.LoggingFile(loggers=logger_list)
        sys.stderr = output.LoggingFile(loggers=logger_list)
        try:
            try:
                self.setup()
                avocado_installed, _ = self.check_remote_avocado()
                if not avocado_installed:
                    raise exceptions.JobError('Remote machine does not seem to'
                                              ' have avocado installed')
            except Exception as details:
                stacktrace.log_exc_info(sys.exc_info(), logger=LOG_JOB)
                raise exceptions.JobError(details)
            results = self.run_test(self.job.references, timeout)
            remote_log_dir = os.path.dirname(results['debuglog'])
            self.result.tests_total = results['total']
            local_log_dir = self.job.logdir
            for tst in results['tests']:
                # tst['test'] appears to be "<digits>-<name>[;<variant>]";
                # split it back into the TestID components
                name = tst['test'].split('-', 1)
                name = [name[0]] + name[1].split(';')
                if len(name) == 3:
                    name[2] = {"variant_id": name[2]}
                name = TestID(*name, no_digits=-1)
                state = dict(name=name,
                             time_elapsed=tst['time'],
                             time_start=tst['start'],
                             time_end=tst['end'],
                             status=tst['status'],
                             logdir=tst['logdir'],
                             logfile=tst['logfile'],
                             fail_reason=tst['fail_reason'],
                             job_logdir=local_log_dir,
                             job_unique_id='')
                self.result.start_test(state)
                self.job._result_events_dispatcher.map_method(
                    'start_test', self.result, state)
                self.result.check_test(state)
                self.job._result_events_dispatcher.map_method(
                    'end_test', self.result, state)
                if state['status'] == "INTERRUPTED":
                    summary.add("INTERRUPTED")
                elif not status.mapping[state['status']]:
                    summary.add("FAIL")
            # pull the zipped remote logs, expand them locally, then
            # remove the transfer artifact
            zip_filename = remote_log_dir + '.zip'
            zip_path_filename = os.path.join(local_log_dir,
                                             os.path.basename(zip_filename))
            self.remote.receive_files(local_log_dir, zip_filename)
            archive.uncompress(zip_path_filename, local_log_dir)
            os.remove(zip_path_filename)
            self.result.end_tests()
            self.job._result_events_dispatcher.map_method(
                'post_tests', self.job)
        finally:
            try:
                self.tear_down()
            except Exception as details:
                stacktrace.log_exc_info(sys.exc_info(), logger=LOG_JOB)
                raise exceptions.JobError(details)
            # restore the original stdout/stderr captured above
            sys.stdout = stdout_backup
            sys.stderr = stderr_backup
        return summary
Esempio n. 17
0
 def test_build_run(self):
     """Extract, configure, build and run the bundled GNU hello sources."""
     archive.uncompress(self.hello, self.workdir)
     src_dir = os.path.join(self.workdir, "hello-2.9")
     build.configure(src_dir)
     build.make(src_dir)
     process.run(os.path.join(src_dir, "src", "hello"))
Esempio n. 18
0
    def run_suite(self, test_suite, variants, timeout=0, replay_map=None,
                  suite_order="variants-per-test"):
        """
        Run one or more tests and report with test result.

        :param test_suite: a list of tests to run (ignored; the remote
                           runner uses self.job.references instead).
        :param variants: a varianter iterator (unused here).
        :param timeout: maximum run time in seconds; falsy disables the
                        timeout.
        :param replay_map: optional replay map (unused here).
        :param suite_order: only "variants-per-test" (or None) is
                            supported for remote execution.

        :return: a set with types of test failures.
        """
        del test_suite     # using self.job.references instead
        del variants            # we're not using multiplexation here
        if suite_order != "variants-per-test" and suite_order is not None:
            raise exceptions.JobError("execution-order %s is not supported "
                                      "for remote execution." % suite_order)
        del suite_order     # suite_order is ignored for now
        if not timeout:     # avoid timeout = 0
            timeout = None
        summary = set()

        # Send fabric/paramiko logging to a dedicated remote.log file and
        # redirect stdout/stderr into the job log while tests run; both
        # are restored in the finally block below
        stdout_backup = sys.stdout
        stderr_backup = sys.stderr
        fabric_debugfile = os.path.join(self.job.logdir, 'remote.log')
        paramiko_logger = logging.getLogger('paramiko')
        fabric_logger = logging.getLogger('avocado.fabric')
        remote_logger = logging.getLogger('avocado.remote')
        fmt = ('%(asctime)s %(module)-10.10s L%(lineno)-.4d %('
               'levelname)-5.5s| %(message)s')
        formatter = logging.Formatter(fmt=fmt, datefmt='%H:%M:%S')
        file_handler = logging.FileHandler(filename=fabric_debugfile)
        file_handler.setFormatter(formatter)
        fabric_logger.addHandler(file_handler)
        paramiko_logger.addHandler(file_handler)
        remote_logger.addHandler(file_handler)
        if self.job.args.show_job_log:
            output.add_log_handler(paramiko_logger.name)
        logger_list = [output.LOG_JOB]
        sys.stdout = output.LoggingFile(loggers=logger_list)
        sys.stderr = output.LoggingFile(loggers=logger_list)
        try:
            try:
                self.setup()
                avocado_installed, _ = self.check_remote_avocado()
                if not avocado_installed:
                    raise exceptions.JobError('Remote machine does not seem to'
                                              ' have avocado installed')
            except Exception as details:
                stacktrace.log_exc_info(sys.exc_info(), logger=LOG_JOB)
                raise exceptions.JobError(details)
            results = self.run_test(self.job.references, timeout)
            remote_log_dir = os.path.dirname(results['debuglog'])
            self.result.tests_total = results['total']
            local_log_dir = self.job.logdir
            for tst in results['tests']:
                # tst['id'] appears to be "<digits>-<name>[;<variant>]";
                # split it back into the TestID components
                name = tst['id'].split('-', 1)
                name = [name[0]] + name[1].split(';')
                if len(name) == 3:
                    name[2] = {"variant_id": name[2]}
                name = TestID(*name, no_digits=-1)
                state = dict(name=name,
                             time_elapsed=tst['time'],
                             time_start=tst['start'],
                             time_end=tst['end'],
                             status=tst['status'],
                             logdir=tst['logdir'],
                             logfile=tst['logfile'],
                             fail_reason=tst['fail_reason'],
                             job_logdir=local_log_dir,
                             job_unique_id='')
                self.result.start_test(state)
                self.job._result_events_dispatcher.map_method('start_test',
                                                              self.result,
                                                              state)
                self.result.check_test(state)
                self.job._result_events_dispatcher.map_method('end_test',
                                                              self.result,
                                                              state)
                if state['status'] == "INTERRUPTED":
                    summary.add("INTERRUPTED")
                elif not status.mapping[state['status']]:
                    summary.add("FAIL")
            # pull the zipped remote logs, expand them locally, then
            # remove the transfer artifact
            zip_filename = remote_log_dir + '.zip'
            zip_path_filename = os.path.join(local_log_dir,
                                             os.path.basename(zip_filename))
            self.remote.receive_files(local_log_dir, zip_filename)
            archive.uncompress(zip_path_filename, local_log_dir)
            os.remove(zip_path_filename)
            self.result.end_tests()
            self.job._result_events_dispatcher.map_method('post_tests',
                                                          self.job)
        finally:
            try:
                self.tear_down()
            except Exception as details:
                stacktrace.log_exc_info(sys.exc_info(), logger=LOG_JOB)
                raise exceptions.JobError(details)
            # restore the original stdout/stderr captured above
            sys.stdout = stdout_backup
            sys.stderr = stderr_backup
        return summary
Esempio n. 19
0
def run(test, params, env):
    """
    Use tcpreplay to replay a pcap file, and check the guest is alive
    1) copy tcpreplay from deps directory
    2) compile the tcpreplay
    3) copy target pcap file from deps directory
    4) use tcpreplay to replay the pcap file to guest
    5) check the guest is still alive, no BSOD occurred

    param test: test object
    param params: test params
    param env: test environment
    """
    def execute_host_cmd(host_cmd, timeout=60):
        """
        Execute the host_cmd on host, limited in timeout period

        param host_cmd: the host_cmd to run on host
        param timeout: the timeout for running this command
        return: the output of the host_cmd
        """
        logging.info("Executing host command: %s", host_cmd)
        cmd_result = process.run(host_cmd, timeout=timeout, shell=True)
        output = cmd_result.stdout_text
        return output

    def copy_file_from_deps(file_name, sub_dir, dst_dir="/tmp"):
        """
        Copy a file from deps directory

        param file_name: the file name
        param sub_dir: sub directory that contain the file
        param dst_dir: the target directory the file copied to
        return: the destination path of the copied file
        """
        src_full_path = os.path.join(data_dir.get_deps_dir(sub_dir), file_name)
        dst_full_path = os.path.join(dst_dir, file_name)
        shutil.copyfile(src_full_path, dst_full_path)
        return dst_full_path

    vm = env.get_vm(params["main_vm"])
    vm.verify_alive()
    tcpreplay_dir = params.get("tcpreplay_dir", "tcpreplay")
    tcpreplay_file_name = params.get("tcpreplay_file_name")
    tcpreplay_compile_cmd = params.get("tcpreplay_compile_cmd")
    pcap_file_name = params.get("pcap_file_name")
    run_tcpreplay_cmd = params.get("run_tcpreplay_cmd")
    tmp_dir = params.get("tmp_dir", "/tmp")
    uncompress_dir = params.get("uncompress_dir")
    timeout = params.get_numeric("timeout", 60)

    error_context.context("Copy %s to %s" % (tcpreplay_file_name, tmp_dir),
                          logging.info)
    tcpreplay_full_path = copy_file_from_deps(tcpreplay_file_name,
                                              tcpreplay_dir, tmp_dir)

    error_context.context("Compile tcpreplay", logging.info)
    uncompress_full_path = os.path.join(tmp_dir, uncompress_dir)

    logging.info("Remove old uncompress directory")
    shutil.rmtree(uncompress_full_path, ignore_errors=True)

    logging.info("Uncompress %s to %s", tcpreplay_full_path,
                 uncompress_full_path)
    # NOTE(review): 'uncompress_dir' is rebound here from the params value
    # to archive.uncompress()'s return value; only its truthiness is used
    # below, but the rebinding is easy to misread
    uncompress_dir = archive.uncompress(tcpreplay_full_path, tmp_dir)
    if not uncompress_dir:
        test.error("Can't uncompress %s" % tcpreplay_full_path)

    logging.info("Compile files at %s", uncompress_full_path)
    execute_host_cmd(tcpreplay_compile_cmd % uncompress_full_path, timeout)

    error_context.context("Copy %s to %s" % (pcap_file_name, tmp_dir),
                          logging.info)
    copy_file_from_deps(pcap_file_name, tcpreplay_dir, tmp_dir)

    error_context.context("Run tcpreplay with pcap file", logging.info)
    output = execute_host_cmd(run_tcpreplay_cmd)
    result = re.search(r'Successful packets:\s+(\d+)', output)
    success_packet = 0
    if result:
        success_packet = int(result.group(1))
    # presumably the pcap is expected to contain exactly one packet;
    # any other count is treated as a replay failure
    if success_packet != 1:
        test.fail("tcpreplay result error with output: %s" % output)

    vm.verify_alive()
Esempio n. 20
0
 def test_empty_tbz2(self):
     """Uncompressing an empty tar.bz2 archive should produce nothing."""
     ret = archive.uncompress(
         os.path.join(BASEDIR, 'selftests', '.data', 'empty.tar.bz2'),
         self.decompressdir)
     # assertIsNone gives a clearer failure message than
     # assertEqual(ret, None)
     self.assertIsNone(ret, "Empty archive should return None (%s)" % ret)
Esempio n. 21
0
 def test_uncompress_gzip(self):
     """A .gz file uncompresses to a single file named after its stem."""
     source = os.path.join(BASEDIR, 'selftests', '.data', 'avocado.gz')
     extracted = archive.uncompress(source, self.decompressdir)
     self.assertEqual(extracted,
                      os.path.join(self.decompressdir, 'avocado'))
     with open(extracted, 'rb') as result:
         self.assertEqual(result.read(), b'avocado\n')
Esempio n. 22
0
 def test_build_run(self):
     hello_src = os.path.join(self.workdir, 'hello-2.9')
     archive.uncompress(self.hello, self.workdir)
     build.configure(hello_src)
     build.make(hello_src)
     process.run(os.path.join(hello_src, 'src', 'hello'))
Esempio n. 23
0
 def test_uncompress_lzma(self):
     """An .xz file uncompresses to a single file named after its stem."""
     source = os.path.join(BASEDIR, "selftests", ".data", "avocado.xz")
     extracted = archive.uncompress(source, self.decompressdir)
     self.assertEqual(extracted,
                      os.path.join(self.decompressdir, "avocado"))
     with open(extracted, "rb") as result:
         self.assertEqual(result.read(), b"avocado\n")
Esempio n. 24
0
 def test_build_run(self):
     """End-to-end check: unpack hello, configure, build, then execute it."""
     archive.uncompress(self.hello, self.workdir)
     hello_dir = os.path.join(self.workdir, 'hello-2.9')
     build.configure(hello_dir)
     build.make(hello_dir)
     process.run(os.path.join(hello_dir, 'src', 'hello'))
Esempio n. 25
0
 def test_empty_tbz2(self):
     """Uncompressing an empty tar.bz2 archive should produce nothing."""
     ret = archive.uncompress(os.path.join(BASEDIR, 'selftests', '.data',
                              'empty.tar.bz2'), self.decompressdir)
     # assertIsNone is the idiomatic None check and produces a clearer
     # failure message than assertEqual(ret, None)
     self.assertIsNone(ret, "Empty archive should return None (%s)" % ret)
Esempio n. 26
0
 def test_uncompress_gzip(self):
     """Gzip uncompression yields one file whose content is preserved."""
     compressed = os.path.join(BASEDIR, 'selftests', '.data', 'avocado.gz')
     output_path = archive.uncompress(compressed, self.decompressdir)
     expected_path = os.path.join(self.decompressdir, 'avocado')
     self.assertEqual(output_path, expected_path)
     with open(output_path, 'rb') as payload:
         self.assertEqual(payload.read(), b'avocado\n')