Example #1
0
    def __init__(
        self,
        path=None,
        port=None,
        wait_until_running=True,
        *extra_args,
    ):
        """
        Initializes a new gdbserver instance

        :param path: location of the gdbserver binary
        :type path: str
        :param port: tcp port number to listen on for incoming connections
        :type port: int
        :param wait_until_running: wait until the gdbserver is running and
                                   accepting connections. It may take a little
                                   while after the process is started before it
                                   is actually bound to the allocated port
        :type wait_until_running: bool
        :param extra_args: optional extra arguments to be passed to gdbserver
        """
        if path is None:
            path = find_command("gdbserver", default="/usr/bin/gdbserver")
        self.path = path
        args = [self.path]
        args += self.REQUIRED_ARGS

        if port is None:
            self.port = ports.find_free_port(*self.PORT_RANGE)
        else:
            self.port = port
        args.append(f":{self.port}")

        prefix = f"avocado_gdbserver_{self.port}_"
        _, self.stdout_path = tempfile.mkstemp(prefix=prefix + "stdout_")
        self.stdout = open(self.stdout_path, "w", encoding="utf-8")
        _, self.stderr_path = tempfile.mkstemp(prefix=prefix + "stderr_")
        self.stderr = open(self.stderr_path, "w", encoding="utf-8")

        try:
            self.process = subprocess.Popen(
                args,
                stdin=subprocess.PIPE,
                stdout=self.stdout,
                stderr=self.stderr,
                close_fds=True,
            )
        except OSError as details:
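            # errno 2 (ENOENT) means the gdbserver binary itself was not found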
            if details.errno == 2:
                exc = OSError(f"File '{args[0]}' not found")
                exc.errno = 2
                raise exc
            else:
                raise

        if wait_until_running:
            self._wait_until_running()
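
A minimal usage sketch for the constructor above (assuming it belongs to an
avocado.utils.gdb-style GDBServer helper class; only attributes that the
snippet itself defines -- port, process, stdout and stderr -- are used):

    from avocado.utils import gdb

    server = gdb.GDBServer(wait_until_running=True)  # picks a free port itself
    print("gdbserver listening on TCP port", server.port)
    # ... attach a GDB client to the port here ...
    server.process.terminate()
    server.stdout.close()
    server.stderr.close()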
Example #2
0
    def test_custom_exit_codes(self):
        status_server = "127.0.0.1:%u" % find_free_port()
        config = {
            'run.references': ['/bin/false'],
            'run.test_runner': 'nrunner',
            'runner.exectest.exitcodes.skip': [1],
            'nrunner.status_server_listen': status_server,
            'nrunner.status_server_uri': status_server,
            'run.keep_tmp': True
        }
        with Job.from_config(job_config=config) as job:
            self.assertEqual(job.run(), 0)
Example #3
0
    def test_sleep_longer_timeout(self):
        status_server = '127.0.0.1:%u' % find_free_port()
        config = {'run.references': [self.script.path],
                  'nrunner.status_server_listen': status_server,
                  'nrunner.status_server_uri': status_server,
                  'run.results_dir': self.tmpdir.name,
                  'run.keep_tmp': True,
                  'task.timeout.running': 2,
                  'run.test_runner': 'nrunner'}

        with Job.from_config(job_config=config) as job:
            job.run()

        self.assertEqual(1, job.result.skipped)
        self.assertEqual(0, job.result.passed)
Example #4
0
    def test_failfast(self):
        status_server = "127.0.0.1:%u" % find_free_port()
        config = {
            'run.references':
            ['/bin/true', '/bin/false', '/bin/true', '/bin/true'],
            'run.test_runner': 'nrunner',
            'run.failfast': True,
            'nrunner.shuffle': False,
            'nrunner.status_server_listen': status_server,
            'nrunner.status_server_uri': status_server,
            'nrunner.max_parallel_tasks': 1
        }
        with Job.from_config(job_config=config) as job:
            self.assertEqual(job.run(), 9)
            self.assertEqual(job.result.passed, 1)
            self.assertEqual(job.result.errors, 0)
            self.assertEqual(job.result.failed, 1)
            self.assertEqual(job.result.skipped, 2)
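
As a side note, the expected exit status of 9 asserted above is presumably the
combination of Avocado's exit code flags (an assumption about
avocado.core.exit_codes, not something stated in the snippet):

    from avocado.core import exit_codes

    # failfast: one test failed (1) and the job was interrupted early (8)
    expected = exit_codes.AVOCADO_TESTS_FAIL | exit_codes.AVOCADO_JOB_INTERRUPTED
    assert expected == 9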
Example #5
0
    def test_is_port_free(self):
        port = ports.find_free_port(sequent=False)
        self.assertTrue(ports.is_port_free(port, "localhost"))
        local_addrs = get_all_local_addrs()
        ipv4_addrs = ["localhost", ""] + list(local_addrs[0])
        ipv6_addrs = ["localhost", ""] + list(local_addrs[1])
        good = []
        bad = []
        skip = []
        sock = None
        for family in ports.FAMILIES:
            if family == socket.AF_INET:
                addrs = ipv4_addrs
            else:
                addrs = ipv6_addrs
            for addr in addrs:
                for protocol in ports.PROTOCOLS:
                    try:
                        sock = socket.socket(family, protocol)
                        sock.bind((addr, port))
                        if ports.is_port_free(port, "localhost"):
                            bad.append("%s, %s, %s: reports free" %
                                       (family, protocol, addr))
                        else:
                            good.append("%s, %s, %s" %
                                        (family, protocol, addr))
                    except Exception as exc:
                        if getattr(exc, 'errno', None) in (-2, 2, 22, 94):
                            skip.append("%s, %s, %s: Not supported: %s" %
                                        (family, protocol, addr, exc))
                        else:
                            bad.append("%s, %s, %s: Failed to bind: %s" %
                                       (family, protocol, addr, exc))
                    finally:
                        if sock is not None:
                            sock.close()
        self.assertFalse(
            bad, "Following combinations failed:\n%s\n\n"
            "Following combinations passed:\n%s\n\n"
            "Following combinations were skipped:\n%s" %
            ("\n".join(bad), "\n".join(good), "\n".join(skip)))
Example #6
0
def run(test, params, env):
    """
    Ansible playbook basic test:
    1) Check ansible package exists
    2) Launch the guest
    3) Clone an ansible playbook repo
    4) Generate the ansible-playbook command
    5) Execute the playbook and verify the return status

    :param test: QEMU test object.
    :param params: Dictionary with the test parameters.
    :param env: Dictionary with test environment.
    """

    guest_user = params["username"]
    guest_passwd = params["password"]
    step_time = params.get_numeric("step_time", 60)
    ansible_callback_plugin = params.get("ansible_callback_plugin")
    ansible_addl_opts = params.get("ansible_addl_opts", "")
    ansible_ssh_extra_args = params["ansible_ssh_extra_args"]
    ansible_extra_vars = params.get("ansible_extra_vars", "{}")
    playbook_repo = params["playbook_repo"]
    playbook_timeout = params.get_numeric("playbook_timeout")
    playbook_dir = params.get("playbook_dir",
                              os.path.join(test.workdir, "ansible_playbook"))
    toplevel_playbook = os.path.join(playbook_dir, params["toplevel_playbook"])
    # Use this directory to copy some logs back from the guest
    test_harness_log_dir = test.logdir

    # Responsive migration specific parameters
    mq_listen_port = params.get_numeric("mq_listen_port", find_free_port())
    wait_response_timeout = params.get_numeric("wait_response_timeout", 600)

    vms = env.get_all_vms()
    guest_ip_list = []
    for vm in vms:
        vm.verify_alive()
        vm.wait_for_login()
        guest_ip_list.append(vm.get_address())

    logging.info("Cloning %s", playbook_repo)
    process.run("git clone {src} {dst}".format(src=playbook_repo,
                                               dst=playbook_dir), verbose=False)

    error_context.base_context("Generate playbook related options.",
                               logging.info)
    extra_vars = {"ansible_ssh_extra_args": ansible_ssh_extra_args,
                  "ansible_ssh_pass": guest_passwd,
                  "mq_port": mq_listen_port,
                  "test_harness_log_dir": test_harness_log_dir}
    extra_vars.update(json.loads(ansible_extra_vars))

    error_context.context("Execute the ansible playbook.", logging.info)
    playbook_executor = ansible.PlaybookExecutor(
        inventory="{},".format(",".join(guest_ip_list)),
        site_yml=toplevel_playbook,
        remote_user=guest_user,
        extra_vars=json.dumps(extra_vars),
        callback_plugin=ansible_callback_plugin,
        addl_opts=ansible_addl_opts
    )

    mq_publisher = message_queuing.MQPublisher(mq_listen_port)
    try:
        error_context.base_context("Confirm the remote subscriber has sent the "
                                   "'ACCESS' message before migrating guests.",
                                   logging.info)
        try:
            mq_publisher.confirm_access(wait_response_timeout)
        except message_queuing.MessageNotFoundError as err:
            logging.error(err)
            test.fail("Failed to capture the 'ACCESS' message.")
        logging.info("Already captured the 'ACCESS' message.")

        error_context.context("Migrate guests after subscriber accessed.",
                              logging.info)
        for vm in vms:
            vm.migrate()
    except VMMigrateFailedError:
        error_context.context("Send the 'ALERT' message to notify the remote "
                              "subscriber to stop the test.", logging.info)
        mq_publisher.alert()
        raise
    else:
        error_context.context("Send the 'APPROVE' message to notify the remote "
                              "subscriber to continue the test.", logging.info)
        mq_publisher.approve()
    finally:
        ansible_log = "ansible_playbook.log"
        try:
            playbook_executor.wait_for_completed(playbook_timeout, step_time)
        except ansible.ExecutorTimeoutError as err:
            test.error(str(err))
        else:
            if playbook_executor.get_status() != 0:
                test.fail("Ansible playbook execution failed, please check the "
                          "{} for details.".format(ansible_log))
            logging.info("Ansible playbook execution passed.")
        finally:
            playbook_executor.store_playbook_log(test_harness_log_dir,
                                                 ansible_log)
            playbook_executor.close()
            mq_publisher.close()
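
The params object consumed above comes from the harness' Cartesian
configuration, which is not part of this snippet. As a rough sketch, with the
key names taken from the lookups above and purely hypothetical placeholder
values, the test expects parameters along these lines:

    params_sketch = {
        "username": "root",                        # guest login user
        "password": "123456",                      # guest login password
        "ansible_ssh_extra_args": "-o StrictHostKeyChecking=no",
        "ansible_extra_vars": "{}",                # JSON string merged into extra_vars
        "playbook_repo": "https://example.com/playbooks.git",
        "toplevel_playbook": "site.yml",           # joined with playbook_dir
        "playbook_timeout": 600,
        "mq_listen_port": 5000,                    # defaults to find_free_port()
        "wait_response_timeout": 600,
    }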
Example #7
0
    def test_find_free_port(self):
        port = ports.find_free_port(sequent=False)
        self.assertEqual(type(port), int)
Example #8
0
    def test_is_port_available(self):
        port = ports.find_free_port(sequent=False)
        result = ports.is_port_available(port, "localhost")
        self.assertTrue(result)
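
Putting the port helpers from the last few examples together, a common pattern
(a sketch that only uses calls already shown above) is to grab a free port and
double-check it right before handing it to a service such as the nrunner status
server:

    from avocado.utils.network import ports

    port = ports.find_free_port(sequent=False)
    if not ports.is_port_free(port, "localhost"):
        raise RuntimeError("port %d was taken before it could be used" % port)
    status_server = "127.0.0.1:%u" % port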
Example #9
0
#!/usr/bin/env python3

import sys

from avocado.core.job import Job
from avocado.core.suite import TestSuite
from avocado.utils.network.ports import find_free_port

status_server = '127.0.0.1:%u' % find_free_port()

config = {
    'run.test_runner': 'nrunner',
    'nrunner.status_server_listen': status_server,
    'nrunner.status_server_uri': status_server,
    'run.references': [
        'selftests/unit/test_resolver.py',
        'selftests/functional/test_argument_parsing.py',
        '/bin/true',
    ],
}

suite = TestSuite.from_config(config)
with Job(config, [suite]) as j:
    sys.exit(j.run())
Example #10
0
    def reverse_debugging(self, shift=7, args=None):
        logger = logging.getLogger('replay')

        # create qcow2 for snapshots
        logger.info('creating qcow2 image for VM snapshots')
        image_path = os.path.join(self.workdir, 'disk.qcow2')
        qemu_img = os.path.join(BUILD_DIR, 'qemu-img')
        if not os.path.exists(qemu_img):
            qemu_img = find_command('qemu-img', False)
        if qemu_img is False:
            self.cancel('Could not find "qemu-img", which is required to '
                        'create the temporary qcow2 image')
        cmd = '%s create -f qcow2 %s 128M' % (qemu_img, image_path)
        process.run(cmd)

        replay_path = os.path.join(self.workdir, 'replay.bin')
        port = find_free_port()

        # record the log
        vm = self.run_vm(True, shift, args, replay_path, image_path, port)
        while self.vm_get_icount(vm) <= self.STEPS:
            pass
        last_icount = self.vm_get_icount(vm)
        vm.shutdown()

        logger.info("recorded log with %s+ steps" % last_icount)

        # replay and run debug commands
        vm = self.run_vm(False, shift, args, replay_path, image_path, port)
        logger.info('connecting to gdbstub')
        g = gdb.GDBRemote('127.0.0.1', port, False, False)
        g.connect()
        r = g.cmd(b'qSupported')
        if b'qXfer:features:read+' in r:
            g.cmd(b'qXfer:features:read:target.xml:0,ffb')
        if b'ReverseStep+' not in r:
            self.fail('Reverse step is not supported by QEMU')
        if b'ReverseContinue+' not in r:
            self.fail('Reverse continue is not supported by QEMU')

        logger.info('stepping forward')
        steps = []
        # record first instruction addresses
        for _ in range(self.STEPS):
            pc = self.get_pc(g)
            logger.info('saving position %x' % pc)
            steps.append(pc)
            self.gdb_step(g)

        # visit the recorded instruction in reverse order
        logger.info('stepping backward')
        for addr in steps[::-1]:
            self.gdb_bstep(g)
            self.check_pc(g, addr)
            logger.info('found position %x' % addr)

        logger.info('seeking to the end (icount %s)' % (last_icount - 1))
        vm.qmp('replay-break', icount=last_icount - 1)
        # continue - will return after pausing
        g.cmd(b'c', b'T02thread:01;')

        logger.info('setting breakpoints')
        for addr in steps:
            # hardware breakpoint at addr with len=1
            g.cmd(b'Z1,%x,1' % addr, b'OK')

        logger.info('running reverse continue to reach %x' % steps[-1])
        # reverse continue - will return after stopping at the breakpoint
        g.cmd(b'bc', b'T05thread:01;')

        # assume that none of the first instructions is executed again
        # breaking the order of the breakpoints
        self.check_pc(g, steps[-1])
        logger.info('successfully reached %x' % steps[-1])

        logger.info('exiting gdb and qemu')
        vm.shutdown()
Example #11
0
class BasicTest(TestCaseTmpDir):

    status_server = '127.0.0.1:%u' % find_free_port()
    command = ('%s run --test-runner=nrunner'
               ' --nrunner-status-server-uri %s'
               ' --nrunner-status-server-listen %s %s')
    skip_install_message = ("This test runs on CI environments only as it"
                            " installs packages to test the feature, which"
                            " may not be desired locally, in the user's"
                            " system.")
    skip_package_manager_message = ("This test runs on CI environments only"
                                    " as it depends on the system package"
                                    " manager, and some environments don't"
                                    " have it available.")

    @unittest.skipUnless(os.getenv('CI'), skip_package_manager_message)
    def test_single_success(self):
        with script.Script(
                os.path.join(self.tmpdir.name, 'test_single_success.py'),
                SINGLE_SUCCESS_CHECK) as test:
            command = self.command % (AVOCADO, self.status_server,
                                      self.status_server, test.path)
            result = process.run(command, ignore_status=True)
            self.assertEqual(result.exit_status, exit_codes.AVOCADO_ALL_OK)
            self.assertIn(
                'PASS 1',
                result.stdout_text,
            )
            self.assertNotIn(
                'bash',
                result.stdout_text,
            )

    @unittest.skipUnless(os.getenv('CI'), skip_package_manager_message)
    def test_single_fail(self):
        with script.Script(
                os.path.join(self.tmpdir.name, 'test_single_fail.py'),
                SINGLE_FAIL_CHECK) as test:
            command = self.command % (AVOCADO, self.status_server,
                                      self.status_server, test.path)
            result = process.run(command, ignore_status=True)
            self.assertEqual(result.exit_status, exit_codes.AVOCADO_ALL_OK)
            self.assertIn(
                'PASS 0',
                result.stdout_text,
            )
            self.assertIn(
                'SKIP 1',
                result.stdout_text,
            )
            self.assertNotIn(
                '-foo-bar-',
                result.stdout_text,
            )

    @unittest.skip('Skipping until test collision is fixed (#4620).')
    @unittest.skipUnless(os.getenv('CI'), skip_install_message)
    def test_multiple_success(self):
        with script.Script(
                os.path.join(self.tmpdir.name, 'test_multiple_success.py'),
                MULTIPLE_SUCCESS) as test:
            command = self.command % (AVOCADO, self.status_server,
                                      self.status_server, test.path)
            result = process.run(command, ignore_status=True)
            self.assertEqual(result.exit_status, exit_codes.AVOCADO_ALL_OK)
            self.assertIn(
                'PASS 3',
                result.stdout_text,
            )
            self.assertNotIn(
                'vim-common',
                result.stdout_text,
            )

    @unittest.skip('Skipping until test collision is fixed (#4620).')
    @unittest.skipUnless(os.getenv('CI'), skip_install_message)
    def test_multiple_fails(self):
        with script.Script(
                os.path.join(self.tmpdir.name, 'test_multiple_fail.py'),
                MULTIPLE_FAIL) as test:
            command = self.command % (AVOCADO, self.status_server,
                                      self.status_server, test.path)
            result = process.run(command, ignore_status=True)
            self.assertEqual(result.exit_status, exit_codes.AVOCADO_ALL_OK)
            self.assertIn(
                'PASS 1',
                result.stdout_text,
            )
            self.assertIn(
                'SKIP 2',
                result.stdout_text,
            )
            self.assertNotIn(
                '-foo-bar-',
                result.stdout_text,
            )

    def tearDown(self):
        self.tmpdir.cleanup()
Example #12
0
def create_suites(args):
    test_class = 'JobAPIFeaturesTest'
    suites = []

    # ========================================================================
    # Test if the archive file was created
    # ========================================================================
    check_archive_file_exists = ('%s:%s.test_check_archive_file_exists'
                                 % (__file__, test_class))
    config_check_archive_file_exists = {
        'run.references': [check_archive_file_exists],
        'run.test_runner': 'runner',
        'run.dict_variants': [
            {'namespace': 'run.results.archive',
             'value': True,
             'assert': True},
        ]
    }

    suites.append(TestSuite.from_config(config_check_archive_file_exists,
                                        "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if the category directory was created
    # ========================================================================
    check_category_directory_exists = (
        '%s:%s.test_check_category_directory_exists'
        % (__file__, test_class))
    config_check_category_directory_exists = {
        'run.references': [check_category_directory_exists],
        'run.test_runner': 'runner',
        'run.dict_variants': [
            {'namespace': 'run.job_category',
             'value': 'foo',
             'assert': True},
        ]
    }

    suites.append(TestSuite.from_config(config_check_category_directory_exists,
                                        "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if a directory was created
    # ========================================================================
    check_directory_exists = ('%s:%s.test_check_directory_exists'
                              % (__file__, test_class))
    config_check_directory_exists = {
        'run.references': [check_directory_exists],
        'run.test_runner': 'runner',
        'run.dict_variants': [
             {'namespace': 'sysinfo.collect.enabled',
              'value': True,
              'directory': 'sysinfo',
              'assert': True},

             {'namespace': 'sysinfo.collect.enabled',
              'value': False,
              'directory': 'sysinfo',
              'assert': False},
        ]
    }

    suites.append(TestSuite.from_config(config_check_directory_exists,
                                        "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test the content of a file
    # ========================================================================
    check_file_content = ('%s:%s.test_check_file_content'
                          % (__file__, test_class))
    config_check_file_content = {
        'run.references': [check_file_content],
        'run.test_runner': 'runner',
        'run.dict_variants': [
            # Finding the correct 'content' here is tricky because any
            # simple string also ends up in the variant file name and is
            # therefore found in the log file.
            # Using 'DEBUG|' makes the variant name contain 'DEBUG_',
            # which works fine here.
            {'namespace': 'job.output.loglevel',
             'value': 'INFO',
             'file': 'job.log',
             'content': 'DEBUG| Test metadata',
             'assert': False},

            {'namespace': 'job.run.result.tap.include_logs',
             'value': True,
             'file': 'results.tap',
             'content': "Command '/bin/true' finished with 0",
             'assert': True},

            {'namespace': 'job.run.result.tap.include_logs',
             'value': False,
             'file': 'results.tap',
             'content': "Command '/bin/true' finished with 0",
             'assert': False},

            {'namespace': 'job.run.result.xunit.job_name',
             'value': 'foo',
             'file': 'results.xml',
             'content': 'name="foo"',
             'assert': True},

            {'namespace': 'job.run.result.xunit.max_test_log_chars',
             'value': 1,
             'file': 'results.xml',
             'content': '--[ CUT DUE TO XML PER TEST LIMIT ]--',
             'assert': True,
             'reference': ['/bin/false'],
             'exit_code': 1},

            {'namespace': 'run.failfast',
             'value': True,
             'file': 'results.json',
             'content': '"skip": 1',
             'assert': True,
             'reference': ['/bin/false', '/bin/true'],
             'exit_code': 9},

            {'namespace': 'run.ignore_missing_references',
             'value': 'on',
             'file': 'results.json',
             'content': '"pass": 1',
             'assert': True,
             'reference': ['/bin/true', 'foo']},

            {'namespace': 'run.unique_job_id',
             'value': 'abcdefghi',
             'file': 'job.log',
             'content': 'Job ID: abcdefghi',
             'assert': True},

            {'namespace': 'job.run.timeout',
             'value': 1,
             'reference': ['examples/tests/sleeptenmin.py'],
             'file': 'job.log',
             'content': 'RuntimeError: Test interrupted by SIGTERM',
             'assert': True,
             'exit_code': 8},
        ]
    }

    suites.append(TestSuite.from_config(config_check_file_content,
                                        "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if the result file was created
    # ========================================================================
    check_file_exists = ('%s:%s.test_check_file_exists'
                         % (__file__, test_class))
    config_check_file_exists = {
        'run.references': [check_file_exists],
        'run.test_runner': 'runner',
        'run.dict_variants': [
            {'namespace': 'job.run.result.json.enabled',
             'value': True,
             'file': 'results.json',
             'assert': True},

            {'namespace': 'job.run.result.json.enabled',
             'value': False,
             'file': 'results.json',
             'assert': False},

            {'namespace': 'job.run.result.tap.enabled',
             'value': True,
             'file': 'results.tap',
             'assert': True},

            {'namespace': 'job.run.result.tap.enabled',
             'value': False,
             'file': 'results.tap',
             'assert': False},

            {'namespace': 'job.run.result.xunit.enabled',
             'value': True,
             'file': 'results.xml',
             'assert': True},

            {'namespace': 'job.run.result.xunit.enabled',
             'value': False,
             'file': 'results.xml',
             'assert': False},

            {'namespace': 'run.dry_run.enabled',
             'value': True,
             'file': 'job.log',
             'assert': False},

            {'namespace': 'run.dry_run.no_cleanup',
             'value': True,
             'file': 'job.log',
             'assert': True},

            {'namespace': 'plugins.disable',
             'value': ['result.xunit'],
             'file': 'result.xml',
             'assert': False},

            # this test needs a huge improvement
            {'namespace': 'run.journal.enabled',
             'value': True,
             'file': '.journal.sqlite',
             'assert': True},
        ]
    }

    if 'html' not in args.disable_plugin_checks:

        config_check_file_exists['run.dict_variants'].append(
            {'namespace': 'job.run.result.html.enabled',
             'value': True,
             'file': 'results.html',
             'assert': True})

        config_check_file_exists['run.dict_variants'].append(
            {'namespace': 'job.run.result.html.enabled',
             'value': False,
             'file': 'results.html',
             'assert': False})

    suites.append(TestSuite.from_config(config_check_file_exists,
                                        "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if a file was created
    # ========================================================================
    check_output_file = ('%s:%s.test_check_output_file'
                         % (__file__, test_class))
    config_check_output_file = {
        'run.references': [check_output_file],
        'run.test_runner': 'runner',
        'run.dict_variants': [
            {'namespace': 'job.run.result.json.output',
             'file': 'custom.json',
             'assert': True},

            # https://github.com/avocado-framework/avocado/issues/4034
            {'namespace': 'job.run.result.tap.output',
             'file': 'custom.tap',
             'assert': True},

            {'namespace': 'job.run.result.xunit.output',
             'file': 'custom.xml',
             'assert': True},
        ]
    }

    if 'html' not in args.disable_plugin_checks:

        config_check_output_file['run.dict_variants'].append(
            {'namespace': 'job.run.result.html.output',
             'file': 'custom.html',
             'assert': True})

    suites.append(TestSuite.from_config(config_check_output_file,
                                        "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if the temporary directory was created
    # ========================================================================
    check_tmp_directory_exists = ('%s:%s.test_check_tmp_directory_exists'
                                  % (__file__, test_class))
    config_check_tmp_directory_exists = {
        'run.references': [check_tmp_directory_exists],
        'run.test_runner': 'runner',
        'run.dict_variants': [
            {'namespace': 'run.keep_tmp',
             'value': True,
             'assert': True},
        ]
    }

    suites.append(TestSuite.from_config(config_check_tmp_directory_exists,
                                        "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Run nrunner interface checks for all available runners
    # ========================================================================
    config_nrunner_interface = {
        'run.references': ['selftests/functional/test_nrunner_interface.py'],
        'run.dict_variants': [
            {'runner': 'avocado-runner'},
            {'runner': 'avocado-runner-noop'},
            {'runner': 'avocado-runner-exec'},
            {'runner': 'avocado-runner-exec-test'},
            {'runner': 'avocado-runner-python-unittest'},
            {'runner': 'avocado-runner-avocado-instrumented'},
            {'runner': 'avocado-runner-tap'},
        ]
    }

    if 'golang' not in args.disable_plugin_checks:
        config_nrunner_interface['run.dict_variants'].append({
            'runner': 'avocado-runner-golang'})

    if 'robot' not in args.disable_plugin_checks:
        config_nrunner_interface['run.dict_variants'].append({
            'runner': 'avocado-runner-robot'})

    if not args.disable_selftests_nrunner_interface:
        suites.append(TestSuite.from_config(config_nrunner_interface, "nrunner-interface"))

    # ========================================================================
    # Run all static checks, unit and functional tests
    # ========================================================================

    selftests = []
    if not args.disable_selftests_unit:
        selftests.append('selftests/unit/')
    if not args.disable_selftests_jobs:
        selftests.append('selftests/jobs/')
    if not args.disable_selftests_functional:
        selftests.append('selftests/functional/')

    status_server = '127.0.0.1:%u' % find_free_port()
    config_check = {
        'run.references': selftests,
        'run.test_runner': 'nrunner',
        'nrunner.status_server_listen': status_server,
        'nrunner.status_server_uri': status_server,
        'run.ignore_missing_references': True,
        'job.output.testlogs.statuses': ['FAIL']
    }

    if not args.disable_static_checks:
        config_check['run.references'] += glob.glob('selftests/*.sh')

    if not args.disable_selftests_optional_plugins:
        for optional_plugin in glob.glob('optional_plugins/*'):
            plugin_name = os.path.basename(optional_plugin)
            if plugin_name not in args.disable_plugin_checks:
                pattern = '%s/tests/*' % optional_plugin
                config_check['run.references'] += glob.glob(pattern)

    suites.append(TestSuite.from_config(config_check, "check"))
    return suites
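
A sketch of how the suites returned by create_suites() would then be consumed,
mirroring the Job usage from Example #9 (the surrounding script's argument
parsing and job-level config are not shown here and are assumed):

    # assumed driver code -- not part of create_suites() itself
    import sys
    from avocado.core.job import Job

    suites = create_suites(args)   # args produced by the script's own argparse setup
    config = {}                    # hypothetical job-level config
    with Job(config, suites) as j:
        sys.exit(j.run())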