Esempio n. 1
0
    def str_filesystem(self):
        """
        Test ID in a format suitable for use in file systems

        The string returned should be safe to be used as a file or
        directory name.  This file system version of the test ID may
        have to shorten either the Test Name or the Variant ID.

        The first component of a Test ID, the numeric unique test id,
        AKA "uid", will be used as a stable identifier between the
        Test ID and the file or directory created based on the return
        value of this method.  If the filesystem can not even
        represent the "uid", then an exception will be raised.

        For Test ID "001-mytest;foo", examples of shortened file
        system versions include "001-mytest;f" or "001-myte;foo".

        :raises: RuntimeError if the test ID cannot be converted to a
                 filesystem representation.
        """
        full_id = str(self)
        safe_id = astring.string_to_safe_path(full_id)
        # no truncation happened: the whole ID is filesystem safe
        if len(full_id) == len(safe_id):
            return safe_id
        # index where the (full) variant would have to start so that it
        # still fits after the uid and a shortened name
        variant_start = len(safe_id) - len(self.str_variant)
        if variant_start > len(self.str_uid):
            # keep the full uid and variant, shorten only the name
            return (safe_id[:variant_start] +
                    astring.string_to_safe_path(self.str_variant))
        if len(self.str_uid) <= len(safe_id):
            # only uid plus variant fit; the name is dropped entirely
            return astring.string_to_safe_path(self.str_uid + self.str_variant)
        # not even the uid could be stored in the filesystem
        raise RuntimeError(f'Test ID is too long to be stored on the '
                           f'filesystem: "{self.str_uid}"\n'
                           f'Full Test ID: "{full_id}"')
Esempio n. 2
0
 def test_safe_path(self):
     """string_to_safe_path() must mask unsafe characters and cap length."""
     to_safe = astring.string_to_safe_path
     # every filesystem-unsafe character becomes an underscore
     self.assertEqual(to_safe('a<>:"/\\|\\?*b'), "a__________b")
     # the leading dot is masked so ".." cannot reference a parent dir
     self.assertEqual(to_safe('..'), "_.")
     # overly long names are truncated to 255 characters
     self.assertEqual(len(to_safe(" " * 300)), 255)
     # non-ASCII characters pass through; trailing "<>" are masked
     avocado = u'\u0430\u0432\u043e\u043a\u0430\u0434\xff<>'
     self.assertEqual(to_safe(avocado), "%s__" % avocado[:-2])
Esempio n. 3
0
 def test_safe_path(self):
     """string_to_safe_path() must mask unsafe characters and cap length."""
     # "\\?" (not the deprecated invalid escape "\?") yields a literal
     # backslash + question mark; same value, no SyntaxWarning on 3.12+
     self.assertEqual(astring.string_to_safe_path('a<>:"/\\|\\?*b'),
                      "a__________b")
     # leading dot is masked so ".." cannot reference a parent directory
     self.assertEqual(astring.string_to_safe_path('..'), "_.")
     # overly long names are truncated to 255 characters
     self.assertEqual(len(astring.string_to_safe_path(" " * 300)), 255)
     # non-ASCII characters pass through; the trailing "<>" are masked
     avocado = u'\u0430\u0432\u043e\u043a\u0430\u0434\xff<>'
     self.assertEqual(astring.string_to_safe_path(avocado),
                      "%s__" % avocado[:-2])
Esempio n. 4
0
 def test_safe_path(self):
     """string_to_safe_path() must mask unsafe characters and cap length."""
     # every filesystem-unsafe character becomes an underscore
     self.assertEqual(astring.string_to_safe_path('a<>:"/\\|\\?*b'),
                      "a__________b")
     # leading dot is masked so ".." cannot reference a parent directory
     self.assertEqual(astring.string_to_safe_path('..'), "_.")
     name = " " * 300
     max_length = path.get_max_file_name_length(name)
     # reuse "name" so the value measured and the value converted are
     # guaranteed to be the same string (was a duplicated literal)
     self.assertEqual(len(astring.string_to_safe_path(name)),
                      max_length)
     # non-ASCII characters pass through; the trailing "<>" are masked
     avocado = '\u0430\u0432\u043e\u043a\u0430\u0434\xff<>'
     self.assertEqual(astring.string_to_safe_path(avocado),
                      f"{avocado[:-2]}__")
Esempio n. 5
0
    def run_test(self, references, timeout):
        """
        Run tests on the remote host and collect their results.

        :param references: a list of test references (joined with spaces
                           into the remote command line).
        :param timeout: maximum time, in seconds, to wait for the remote
                        execution.
        :return: a dictionary with test results, parsed from the remote
                 avocado JSON output.
        :raises exceptions.JobError: if the remote job fails, times out
                                     or produces unparsable JSON output.
        """
        def arg_to_dest(arg):
            """
            Turns long argparse arguments into default dest
            """
            return arg[2:].replace('-', '_')

        extra_params = []
        # bool or nargs
        for arg in ["--mux-yaml", "--dry-run",
                    "--filter-by-tags-include-empty"]:
            value = getattr(self.job.args, arg_to_dest(arg), None)
            if value is True:
                extra_params.append(arg)
            elif value:
                extra_params.append("%s %s" % (arg, " ".join(value)))
        # append
        for arg in ["--filter-by-tags"]:
            value = getattr(self.job.args, arg_to_dest(arg), None)
            if value:
                # repeat the option before every value, e.g. "-t a -t b"
                join = ' %s ' % arg
                extra_params.append("%s %s" % (arg, join.join(value)))

        references_str = " ".join(references)

        avocado_cmd = ('avocado run --force-job-id %s --json - '
                       '--archive %s %s' % (self.job.unique_id,
                                            references_str, " ".join(extra_params)))
        try:
            result = self.remote.run(avocado_cmd, ignore_status=True,
                                     timeout=timeout)
            if result.exit_status & exit_codes.AVOCADO_JOB_FAIL:
                raise exceptions.JobError("Remote execution failed with: %s" % result.stderr)

        except CommandTimeout as detail:
            # chain the cause so the original timeout is not lost
            raise exceptions.JobError("Remote execution took longer than "
                                      "specified timeout (%s). Interrupting."
                                      % (timeout)) from detail

        try:
            json_result = self._parse_json_response(result.stdout)
        except Exception:
            # narrowed from a bare "except:" so SystemExit and
            # KeyboardInterrupt are not swallowed; still log the traceback
            stacktrace.log_exc_info(sys.exc_info(),
                                    logger='avocado.app.debug')
            raise exceptions.JobError(result.stdout)

        # rewrite each test's log locations to point at the local job dir
        for t_dict in json_result['tests']:
            logdir = os.path.join(self.job.logdir, 'test-results')
            relative_path = astring.string_to_safe_path(str(t_dict['id']))
            logdir = os.path.join(logdir, relative_path)
            t_dict['logdir'] = logdir
            t_dict['logfile'] = os.path.join(logdir, 'debug.log')

        return json_result
Esempio n. 6
0
    def run_test(self, references, timeout):
        """
        Run tests on the remote host and collect their results.

        :param references: a list of test references (joined with spaces
                           into the remote command line).
        :param timeout: maximum time, in seconds, to wait for the remote
                        execution.
        :return: a dictionary with test results, parsed from the remote
                 avocado JSON output.
        :raises exceptions.JobError: if the remote job fails, times out
                                     or produces unparsable JSON output.
        """
        def arg_to_dest(arg):
            """
            Turns long argparse arguments into default dest
            """
            return arg[2:].replace('-', '_')

        extra_params = []
        # bool or nargs
        for arg in ["--mux-yaml", "--dry-run",
                    "--filter-by-tags-include-empty"]:
            value = getattr(self.job.args, arg_to_dest(arg), None)
            if value is True:
                extra_params.append(arg)
            elif value:
                extra_params.append("%s %s" % (arg, " ".join(value)))
        # append
        for arg in ["--filter-by-tags"]:
            value = getattr(self.job.args, arg_to_dest(arg), None)
            if value:
                # repeat the option before every value, e.g. "-t a -t b"
                join = ' %s ' % arg
                extra_params.append("%s %s" % (arg, join.join(value)))

        references_str = " ".join(references)

        avocado_cmd = ('avocado run --force-job-id %s --json - '
                       '--archive %s %s' % (self.job.unique_id,
                                            references_str, " ".join(extra_params)))
        try:
            result = self.remote.run(avocado_cmd, ignore_status=True,
                                     timeout=timeout)
            if result.exit_status & exit_codes.AVOCADO_JOB_FAIL:
                raise exceptions.JobError("Remote execution failed with: %s" % result.stderr)

        except CommandTimeout as detail:
            # chain the cause so the original timeout is not lost
            raise exceptions.JobError("Remote execution took longer than "
                                      "specified timeout (%s). Interrupting."
                                      % (timeout)) from detail

        try:
            json_result = self._parse_json_response(result.stdout)
        except Exception:
            # narrowed from a bare "except:" so SystemExit and
            # KeyboardInterrupt are not swallowed; still log the traceback
            stacktrace.log_exc_info(sys.exc_info(),
                                    logger='avocado.app.debug')
            raise exceptions.JobError(result.stdout)

        # rewrite each test's log locations to point at the local job dir
        for t_dict in json_result['tests']:
            logdir = os.path.join(self.job.logdir, 'test-results')
            relative_path = astring.string_to_safe_path(str(t_dict['id']))
            logdir = os.path.join(logdir, relative_path)
            t_dict['logdir'] = logdir
            t_dict['logfile'] = os.path.join(logdir, 'debug.log')

        return json_result
Esempio n. 7
0
    def _get_datadir(self, source):
        """
        Resolve a data source name to a filesystem-safe directory path.

        Returns None when the source is unknown or any of its lazily
        evaluated path components is None.
        """
        component_funcs = self._data_sources_mapping.get(source)
        if component_funcs is None:
            return

        # evaluate lazily, needed when the class changes its own
        # information such as its datadir
        components = [func() for func in component_funcs]
        if None in components:
            return

        # Absolute components are believed to have been treated already
        # (such as the entries that return the self.datadir) and are kept
        # whole; relative ones are split into their individual parts.
        pieces = []
        for component in components:
            if os.path.isabs(component):
                pieces.append(component)
            else:
                pieces.extend(component.split(os.path.sep))

        # make sure each individual (non-absolute) piece can be
        # represented in the filesystem
        safe_pieces = [piece if os.path.isabs(piece)
                       else astring.string_to_safe_path(piece)
                       for piece in pieces]
        return os.path.join(*safe_pieces)
Esempio n. 8
0
 def test_uid_name_no_digits(self):
     """With no_digits=2 the string uid is zero-padded to two digits."""
     name = 'file.py:klass.test_method'
     test_id = TestID(1, name, no_digits=2)
     # the numeric uid is kept as given; only the string form is padded
     self.assertEqual(test_id.uid, 1)
     self.assertEqual(test_id.str_uid, '01')
     expected = astring.string_to_safe_path('01-' + name)
     self.assertEqual(test_id.str_filesystem, expected)
     # no variant was supplied
     self.assertIs(test_id.variant, None)
     self.assertIs(test_id.str_variant, '')
Esempio n. 9
0
 def test_uid_name(self):
     """A plain uid/name pair keeps uid as-is and has no variant."""
     name = "file.py:klass.test_method"
     test_id = TestID(1, name)
     self.assertEqual(test_id.uid, 1)
     self.assertEqual(test_id.str_uid, "1")
     expected = astring.string_to_safe_path("1-" + name)
     self.assertEqual(test_id.str_filesystem, expected)
     # no variant was supplied
     self.assertIs(test_id.variant, None)
     self.assertIs(test_id.str_variant, "")
Esempio n. 10
0
 def test_uid_name(self):
     """TestID built from uid and name only carries no variant info."""
     uid, name = 1, 'file.py:klass.test_method'
     test_id = TestID(uid, name)
     self.assertEqual(test_id.uid, uid)
     self.assertEqual(test_id.str_uid, str(uid))
     self.assertEqual(test_id.str_filesystem,
                      astring.string_to_safe_path('%s-%s' % (uid, name)))
     # no variant was supplied
     self.assertIs(test_id.variant, None)
     self.assertIs(test_id.str_variant, '')
Esempio n. 11
0
 def test_uid_name_no_digits(self):
     """With no_digits=2 the string uid is zero-padded to two digits."""
     name = 'file.py:klass.test_method'
     test_id = test.TestID(1, name, no_digits=2)
     # the numeric uid is kept as given; only the string form is padded
     self.assertEqual(test_id.uid, 1)
     self.assertEqual(test_id.str_uid, '01')
     expected = astring.string_to_safe_path('01-' + name)
     self.assertEqual(test_id.str_filesystem, expected)
     # no variant was supplied
     self.assertIs(test_id.variant, None)
     self.assertIs(test_id.str_variant, '')
Esempio n. 12
0
    def run_test(self, references, timeout):
        """
        Run tests on the remote host and collect their results.

        :param references: a list of test references (joined with spaces
                           into the remote command line).
        :param timeout: maximum time, in seconds, to wait for the remote
                        execution.
        :return: a dictionary with test results, parsed from the remote
                 avocado JSON output.
        :raises exceptions.JobError: if the remote job fails, times out
                                     or produces unparsable JSON output.
        """
        extra_params = []
        # supply a default so a missing "mux_yaml" attribute does not
        # raise AttributeError (the original getattr() had no fallback)
        mux_files = getattr(self.job.args, 'mux_yaml', None) or []
        if mux_files:
            extra_params.append("-m %s" % " ".join(mux_files))

        if getattr(self.job.args, "dry_run", False):
            extra_params.append("--dry-run")
        references_str = " ".join(references)

        avocado_cmd = (
            'avocado run --force-job-id %s --json - '
            '--archive %s %s' %
            (self.job.unique_id, references_str, " ".join(extra_params)))
        try:
            result = self.remote.run(avocado_cmd,
                                     ignore_status=True,
                                     timeout=timeout)
            if result.exit_status & exit_codes.AVOCADO_JOB_FAIL:
                raise exceptions.JobError("Remote execution failed with: %s" %
                                          result.stderr)

        except CommandTimeout as detail:
            # chain the cause so the original timeout is not lost
            raise exceptions.JobError("Remote execution took longer than "
                                      "specified timeout (%s). Interrupting." %
                                      (timeout)) from detail

        try:
            json_result = self._parse_json_response(result.stdout)
        except Exception:
            # narrowed from a bare "except:" so SystemExit and
            # KeyboardInterrupt are not swallowed; still log the traceback
            stacktrace.log_exc_info(sys.exc_info(), logger='avocado.debug')
            raise exceptions.JobError(result.stdout)

        # rewrite each test's log locations to point at the local job dir
        for t_dict in json_result['tests']:
            logdir = os.path.join(self.job.logdir, 'test-results')
            relative_path = astring.string_to_safe_path(str(t_dict['test']))
            logdir = os.path.join(logdir, relative_path)
            t_dict['logdir'] = logdir
            t_dict['logfile'] = os.path.join(logdir, 'debug.log')

        return json_result
Esempio n. 13
0
 def test_kill_stopped_sleep(self):
     """The job timeout must still kill a test stopped with SIGTSTP."""
     sleep = process.run("which sleep", ignore_status=True, shell=True)
     if sleep.exit_status:
         self.skipTest("Sleep binary not found in PATH")
     sleep = "'%s 60'" % sleep.stdout.strip()
     proc = aexpect.Expect("./scripts/avocado run %s --job-results-dir %s "
                           "--sysinfo=off --job-timeout 3" %
                           (sleep, self.tmpdir))
     # raw string: "\(" is a regex escape, not a (deprecated) Python
     # string escape
     proc.read_until_output_matches([r"\(1/1\)"],
                                    timeout=3,
                                    internal_timeout=0.01)
     # We need pid of the avocado, not the shell executing it
     pid = int(process.get_children_pids(proc.get_pid())[0])
     os.kill(pid, signal.SIGTSTP)  # This freezes the process
     deadline = time.time() + 9
     while time.time() < deadline:
         if not proc.is_alive():
             break
         time.sleep(0.1)
     else:
         proc.kill(signal.SIGKILL)
         self.fail("Avocado process still alive 5s after job-timeout:\n%s" %
                   proc.get_output())
     output = proc.get_output()
     self.assertIn(
         "ctrl+z pressed, stopping test", output, "SIGTSTP "
         "message not in the output, test was probably not "
         "stopped.")
     self.assertIn(
         "TIME", output, "TIME not in the output, avocado "
         "probably died unexpectedly")
     # 8 == AVOCADO_JOB_INTERRUPTED-style exit; message fixed to match
     self.assertEqual(proc.get_status(), 8, "Avocado did not finish with "
                      "8.")
     sleep_dir = astring.string_to_safe_path("1-" + sleep[1:-1])
     debug_log = os.path.join(self.tmpdir, "latest", "test-results",
                              sleep_dir, "debug.log")
     # close the log file instead of leaking the handle
     with open(debug_log) as debug_file:
         debug_log = debug_file.read()
     self.assertIn(
         "Runner error occurred: Timeout reached", debug_log,
         "Runner error occurred: Timeout reached message not "
         "in the test's debug.log:\n%s" % debug_log)
     self.assertNotIn(
         "Traceback (most recent", debug_log, "Traceback "
         "present in the test's debug.log file, but it was "
         "suppose to be stopped and unable to produce it.\n"
         "%s" % debug_log)
Esempio n. 14
0
def record(job, cmdline=None):
    """
    Records all required job information.
    """
    def _write_synced(file_path, content):
        # write the content, then force it to disk before returning
        with open(file_path, 'w', encoding='utf-8') as output:
            output.write(content)
            output.flush()
            os.fsync(output)

    base_dir = init_dir(job.logdir, JOB_DATA_DIR)
    path_cfg = os.path.join(base_dir, CONFIG_FILENAME)
    path_references = os.path.join(base_dir, TEST_REFERENCES_FILENAME)
    path_pwd = os.path.join(base_dir, PWD_FILENAME)
    path_job_config = os.path.join(base_dir, JOB_CONFIG_FILENAME)
    path_cmdline = os.path.join(base_dir, CMDLINE_FILENAME)

    references = job.config.get('resolver.references')
    if references:
        _write_synced(path_references, f'{references}')

    # the settings object writes itself, so it keeps its own handle
    with open(path_cfg, 'w', encoding='utf-8') as config_file:
        settings.config.write(config_file)
        config_file.flush()
        os.fsync(config_file)

    # one variants file per test suite, named after its 1-based index
    # (plus the suite name, when it has one)
    for idx, suite in enumerate(job.test_suites, 1):
        suffix = f"-{suite.name}" if suite.name else ""
        suite_var_name = string_to_safe_path(f"variants-{idx}{suffix}.json")
        record_suite_variant(os.path.join(base_dir, suite_var_name), suite)

    _write_synced(path_pwd, f'{os.getcwd()}')

    # job config is serialized as JSON via the project's custom encoder
    with open(path_job_config, 'w', encoding='utf-8') as job_config_file:
        json.dump(job.config, job_config_file, cls=ConfigEncoder)
        job_config_file.flush()
        os.fsync(job_config_file)

    _write_synced(path_cmdline, f'{cmdline}')
Esempio n. 15
0
    def _setup_job_category(self):
        """
        This has to be called after self.logdir has been defined

        It attempts to create a directory one level up from the job results,
        with the given category name.  Then, a symbolic link is created to
        this job results directory.

        This should allow a user to look at a single directory for all
        jobs of a given category.
        """
        def _warn(message):
            # surface the problem both to the user and in the job log
            LOG_UI.warning(message)
            LOG_JOB.warning(message)

        category = self.config.get("run.job_category")
        if category is None:
            return

        # the category becomes a directory name, so it must round-trip
        # through the filesystem-safe conversion unchanged
        if astring.string_to_safe_path(category) != category:
            _warn(
                f"Unable to set category in job results: name is not "
                f"filesystem safe: {category}"
            )
            return

        # we could also get "base_logdir" from config, but I believe this is
        # the best choice because it reduces the dependency surface (depends
        # only on self.logdir)
        category_path = os.path.join(os.path.dirname(self.logdir), category)
        try:
            os.mkdir(category_path)
        except FileExistsError:
            # another job already created the category directory
            pass

        try:
            os.symlink(
                os.path.relpath(self.logdir, category_path),
                os.path.join(category_path, os.path.basename(self.logdir)),
            )
        except NotImplementedError:
            _warn(f"Unable to link this job to category {category}")
        except OSError:
            _warn(f"Permission denied to link this job to category {category}")
Esempio n. 16
0
    def run_test(self, references, timeout):
        """
        Run tests on the remote host and collect their results.

        :param references: a list of test references (joined with spaces
                           into the remote command line).
        :param timeout: maximum time, in seconds, to wait for the remote
                        execution.
        :return: a dictionary with test results, parsed from the remote
                 avocado JSON output.
        :raises exceptions.JobError: if the remote job fails, times out
                                     or produces unparsable JSON output.
        """
        extra_params = []
        mux_files = getattr(self.job.args, 'mux_yaml', [])
        if mux_files:
            extra_params.append("-m %s" % " ".join(mux_files))

        if getattr(self.job.args, "dry_run", False):
            extra_params.append("--dry-run")
        references_str = " ".join(references)

        avocado_cmd = ('avocado run --force-job-id %s --json - '
                       '--archive %s %s' % (self.job.unique_id,
                                            references_str, " ".join(extra_params)))
        try:
            result = self.remote.run(avocado_cmd, ignore_status=True,
                                     timeout=timeout)
            if result.exit_status & exit_codes.AVOCADO_JOB_FAIL:
                raise exceptions.JobError("Remote execution failed with: %s" % result.stderr)

        except CommandTimeout as detail:
            # chain the cause so the original timeout is not lost
            raise exceptions.JobError("Remote execution took longer than "
                                      "specified timeout (%s). Interrupting."
                                      % (timeout)) from detail

        try:
            json_result = self._parse_json_response(result.stdout)
        except Exception:
            # narrowed from a bare "except:" so SystemExit and
            # KeyboardInterrupt are not swallowed; still log the traceback
            stacktrace.log_exc_info(sys.exc_info(), logger='avocado.debug')
            raise exceptions.JobError(result.stdout)

        # rewrite each test's log locations to point at the local job dir
        for t_dict in json_result['tests']:
            logdir = os.path.join(self.job.logdir, 'test-results')
            relative_path = astring.string_to_safe_path(str(t_dict['id']))
            logdir = os.path.join(logdir, relative_path)
            t_dict['logdir'] = logdir
            t_dict['logfile'] = os.path.join(logdir, 'debug.log')

        return json_result
Esempio n. 17
0
 def test_kill_stopped_sleep(self):
     """The job timeout must still kill a test stopped with SIGTSTP."""
     sleep = process.run("which sleep", ignore_status=True, shell=True)
     if sleep.exit_status:
         self.skipTest("Sleep binary not found in PATH")
     sleep = "'%s 60'" % sleep.stdout.strip()
     proc = aexpect.Expect("./scripts/avocado run %s --job-results-dir %s "
                           "--sysinfo=off --job-timeout 3"
                           % (sleep, self.tmpdir))
     # raw string: "\(" is a regex escape, not a (deprecated) Python
     # string escape
     proc.read_until_output_matches([r"\(1/1\)"], timeout=3,
                                    internal_timeout=0.01)
     # We need pid of the avocado, not the shell executing it
     pid = int(process.get_children_pids(proc.get_pid())[0])
     os.kill(pid, signal.SIGTSTP)   # This freezes the process
     deadline = time.time() + 9
     while time.time() < deadline:
         if not proc.is_alive():
             break
         time.sleep(0.1)
     else:
         proc.kill(signal.SIGKILL)
         self.fail("Avocado process still alive 5s after job-timeout:\n%s"
                   % proc.get_output())
     output = proc.get_output()
     self.assertIn("ctrl+z pressed, stopping test", output, "SIGTSTP "
                   "message not in the output, test was probably not "
                   "stopped.")
     self.assertIn("TIME", output, "TIME not in the output, avocado "
                   "probably died unexpectedly")
     # 8 == AVOCADO_JOB_INTERRUPTED-style exit; message fixed to match
     self.assertEqual(proc.get_status(), 8, "Avocado did not finish with "
                      "8.")
     sleep_dir = astring.string_to_safe_path("1-" + sleep[1:-1])
     debug_log = os.path.join(self.tmpdir, "latest", "test-results",
                              sleep_dir, "debug.log")
     # close the log file instead of leaking the handle
     with open(debug_log) as debug_file:
         debug_log = debug_file.read()
     self.assertIn("Runner error occurred: Timeout reached", debug_log,
                   "Runner error occurred: Timeout reached message not "
                   "in the test's debug.log:\n%s" % debug_log)
     self.assertNotIn("Traceback (most recent", debug_log, "Traceback "
                      "present in the test's debug.log file, but it was "
                      "suppose to be stopped and unable to produce it.\n"
                      "%s" % debug_log)
Esempio n. 18
0
    def stream_output(job_id, task_id):
        """Returns output files streams in binary mode from a task.

        This method will find for output files generated by a task and will
        return a generator with tuples, each one containing a filename and
        bytes.

        You need to provide in your spawner a `stream_output()` method if this
        one is not suitable for your spawner. i.e: if the spawner is trying to
        access a remote output file.

        :raises SpawnerException: when the task's data directory does not
                                  exist.
        """
        results_dir = get_job_results_dir(job_id)
        task_id = string_to_safe_path(task_id)
        data_pointer = '{}/test-results/{}/data'.format(results_dir, task_id)
        # close the pointer file promptly instead of leaking the handle
        with open(data_pointer, 'r') as pointer_file:
            src = pointer_file.readline().rstrip()
        try:
            for path in Path(src).expanduser().iterdir():
                # empty files carry no output worth streaming
                if path.is_file() and path.stat().st_size != 0:
                    for stream in SpawnerMixin.bytes_from_file(str(path)):
                        yield (path.name, stream)
        except FileNotFoundError as e:
            # keep the original failure as the cause for easier debugging
            raise SpawnerException("Task not found: {}".format(e)) from e
Esempio n. 19
0
 def __init__(self, log_path):
     self.log_path = astring.string_to_safe_path(log_path)
     self._name = os.path.basename(log_path)