Example #1
0
def create_suites(args):  # pylint: disable=W0621
    """Build the list of selftest suites selected by the parsed arguments.

    :param args: parsed command line arguments; must provide ``dict_tests``
                 and ``disable_plugin_checks``.
    :returns: list of :class:`TestSuite` instances ready to be run.
    """
    suites = []

    # ========================================================================
    # Run nrunner interface checks for all available runners
    # ========================================================================
    # Only the bare "avocado-runner" exits 2 when invoked without args or
    # with a URI only; every other runner is expected to exit 0.
    nrunner_variants = [{
        'runner': 'avocado-runner',
        'runnable-run-no-args-exit-code': 2,
        'runnable-run-uri-only-exit-code': 2
    }]
    for runner_name in ('avocado-runner-dry-run',
                        'avocado-runner-noop',
                        'avocado-runner-exec-test',
                        'avocado-runner-python-unittest',
                        'avocado-runner-avocado-instrumented',
                        'avocado-runner-tap'):
        nrunner_variants.append({
            'runner': runner_name,
            'runnable-run-no-args-exit-code': 0,
            'runnable-run-uri-only-exit-code': 0
        })

    config_nrunner_interface = {
        'resolver.references':
        ['selftests/functional/test_nrunner_interface.py'],
        'run.dict_variants.variant_id_keys': ['runner'],
        'run.dict_variants': nrunner_variants,
    }

    # Runners shipped by optional plugins are only checked when the plugin
    # is installed and its checks were not explicitly disabled.
    for plugin_suffix in ('golang', 'robot'):
        plugin_module = 'avocado-framework-plugin-%s' % plugin_suffix
        if (python_module_available(plugin_module)
                and plugin_suffix not in args.disable_plugin_checks):
            nrunner_variants.append({
                'runner': 'avocado-runner-%s' % plugin_suffix,
                'runnable-run-no-args-exit-code': 0,
                'runnable-run-uri-only-exit-code': 0
            })

    if args.dict_tests['nrunner-interface']:
        suites.append(
            TestSuite.from_config(config_nrunner_interface,
                                  "nrunner-interface"))

    # ========================================================================
    # Run all static checks, unit and functional tests
    # ========================================================================

    # Shared base config; shallow-copied per suite by the helper below.
    config_check = {
        'run.ignore_missing_references': True,
    }

    def make_suite(name, references, **extra):
        # Derive a suite config from the shared base plus its own references.
        config = copy.copy(config_check)
        config['resolver.references'] = references
        config.update(extra)
        return TestSuite.from_config(config, name)

    if args.dict_tests['unit']:
        suites.append(make_suite("unit", ['selftests/unit/']))

    if args.dict_tests['jobs']:
        suites.append(make_suite("jobs", ['selftests/jobs/']))

    if args.dict_tests['functional']:
        functional_path = os.path.join('selftests', 'functional')
        parallel_refs = glob.glob(os.path.join(functional_path, 'test*.py'))
        parallel_refs += [
            os.path.join(functional_path, 'utils'),
            os.path.join(functional_path, 'plugin')
        ]
        suites.append(make_suite("functional-parallel", parallel_refs))

        # Serial functional tests must not run alongside anything else.
        suites.append(
            make_suite("functional-serial",
                       ['selftests/functional/serial/'],
                       **{'nrunner.max_parallel_tasks': 1}))

    if args.dict_tests['static-checks']:
        suites.append(
            make_suite("static-checks", glob.glob('selftests/*.sh')))

    if args.dict_tests['optional-plugins']:
        optional_refs = []
        for optional_plugin in glob.glob('optional_plugins/*'):
            if os.path.basename(optional_plugin) in args.disable_plugin_checks:
                continue
            optional_refs += glob.glob('%s/tests/*' % optional_plugin)
        suites.append(make_suite("optional-plugins", optional_refs))

    return suites
Example #2
0
def create_suite_job_api(args):  # pylint: disable=W0621
    """Build the Job API feature-check suites.

    Each suite runs variants of ``JobAPIFeaturesTest`` test methods defined
    in this same file, checking that setting a given config namespace/value
    has the expected observable effect (archive created, directory present,
    file content written, result file produced, etc.).

    :param args: parsed command line arguments; must provide
                 ``disable_plugin_checks``.
    :returns: list of :class:`TestSuite` instances named ``job-api-<n>``,
              numbered in creation order.
    """
    suites = []

    def get_ref(method_short_name):
        # Build a test reference pointing at a JobAPIFeaturesTest method
        # declared in this very file.
        return [
            '%s:JobAPIFeaturesTest.test_%s' % (__file__, method_short_name)
        ]

    # ========================================================================
    # Test if the archive file was created
    # ========================================================================
    config_check_archive_file_exists = {
        'resolver.references':
        get_ref('check_archive_file_exists'),
        'run.dict_variants.variant_id_keys': ['namespace', 'value'],
        'run.dict_variants': [
            {
                'namespace': 'run.results.archive',
                'value': True,
                'assert': True
            },
        ]
    }

    # Suites are numbered by creation order: job-api-1, job-api-2, ...
    suites.append(
        TestSuite.from_config(config_check_archive_file_exists,
                              "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if the category directory was created
    # ========================================================================
    config_check_category_directory_exists = {
        'resolver.references':
        get_ref('check_category_directory_exists'),
        'run.dict_variants.variant_id_keys': ['namespace', 'value'],
        'run.dict_variants': [
            {
                'namespace': 'run.job_category',
                'value': 'foo',
                'assert': True
            },
        ]
    }

    suites.append(
        TestSuite.from_config(config_check_category_directory_exists,
                              "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if a directory was created
    # ========================================================================
    config_check_directory_exists = {
        'resolver.references':
        get_ref('check_directory_exists'),
        'run.dict_variants.variant_id_keys': ['namespace', 'value'],
        'run.dict_variants': [
            {
                'namespace': 'sysinfo.collect.enabled',
                'value': True,
                'directory': 'sysinfo',
                'assert': True
            },
            {
                'namespace': 'sysinfo.collect.enabled',
                'value': False,
                'directory': 'sysinfo',
                'assert': False
            },
        ]
    }

    suites.append(
        TestSuite.from_config(config_check_directory_exists,
                              "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test the content of a file
    # ========================================================================
    config_check_file_content = {
        'resolver.references':
        get_ref('check_file_content'),
        'run.dict_variants.variant_id_keys': ['namespace', 'value', 'file'],
        'run.dict_variants': [
            # finding the correct 'content' here is trick because any
            # simple string is added to the variant file name and is
            # found in the log file.
            # Using DEBUG| makes the variant name have DEBUG_, working
            # fine here.
            {
                'namespace': 'job.output.loglevel',
                'value': 'INFO',
                'file': 'job.log',
                'content': r'DEBUG\| Test metadata:$',
                'assert': False,
                'regex': True
            },
            {
                'namespace': 'job.run.result.tap.include_logs',
                'value': True,
                'file': 'results.tap',
                'reference': ['examples/tests/passtest.py:PassTest.test'],
                'content': 'PASS 1-examples/tests/passtest.py:PassTest.test',
                'assert': True
            },
            {
                'namespace': 'job.run.result.tap.include_logs',
                'value': False,
                'file': 'results.tap',
                'content': "Command '/bin/true' finished with 0",
                'assert': False
            },
            {
                'namespace': 'job.run.result.xunit.job_name',
                'value': 'foo',
                'file': 'results.xml',
                'content': 'name="foo"',
                'assert': True
            },
            {
                'namespace': 'job.run.result.xunit.max_test_log_chars',
                'value': 1,
                'file': 'results.xml',
                'content': '--[ CUT DUE TO XML PER TEST LIMIT ]--',
                'assert': True,
                'reference': ['examples/tests/failtest.py:FailTest.test'],
                'exit_code': 1
            },
            {
                'namespace': 'run.failfast',
                'value': True,
                'file': 'results.json',
                'content': '"skip": 1',
                'assert': True,
                'reference': ['/bin/false', '/bin/true'],
                'exit_code': 9,
                'extra_job_config': {
                    'nrunner.max_parallel_tasks': 1
                }
            },
            {
                'namespace': 'run.ignore_missing_references',
                'value': 'on',
                'file': 'results.json',
                'content': '"pass": 1',
                'assert': True,
                'reference': ['/bin/true', 'foo']
            },
            {
                'namespace': 'run.unique_job_id',
                'value': 'abcdefghi',
                'file': 'job.log',
                'content': 'Job ID: abcdefghi',
                'assert': True
            },
            {
                'namespace': 'job.run.timeout',
                'value': 1,
                'reference': ['examples/tests/sleeptenmin.py'],
                'file': 'job.log',
                'content': 'RuntimeError: Test interrupted by SIGTERM',
                'assert': True,
                'exit_code': 8
            },
        ]
    }

    suites.append(
        TestSuite.from_config(config_check_file_content,
                              "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if the result file was created
    # ========================================================================
    config_check_file_exists = {
        'resolver.references':
        get_ref('check_file_exists'),
        'run.dict_variants.variant_id_keys': ['namespace', 'value'],
        'run.dict_variants': [
            {
                'namespace': 'job.run.result.json.enabled',
                'value': True,
                'file': 'results.json',
                'assert': True
            },
            {
                'namespace': 'job.run.result.json.enabled',
                'value': False,
                'file': 'results.json',
                'assert': False
            },
            {
                'namespace': 'job.run.result.tap.enabled',
                'value': True,
                'file': 'results.tap',
                'assert': True
            },
            {
                'namespace': 'job.run.result.tap.enabled',
                'value': False,
                'file': 'results.tap',
                'assert': False
            },
            {
                'namespace': 'job.run.result.xunit.enabled',
                'value': True,
                'file': 'results.xml',
                'assert': True
            },
            {
                'namespace': 'job.run.result.xunit.enabled',
                'value': False,
                'file': 'results.xml',
                'assert': False
            },
            {
                'namespace': 'run.dry_run.enabled',
                'value': True,
                'file': 'job.log',
                'assert': False
            },
            {
                'namespace': 'run.dry_run.no_cleanup',
                'value': True,
                'file': 'job.log',
                'assert': True
            },
            # NOTE(review): 'result.xml' (singular) looks like a typo for
            # 'results.xml' — with 'assert': False this check passes
            # vacuously since no 'result.xml' is ever produced; verify.
            {
                'namespace': 'plugins.disable',
                'value': ['result.xunit'],
                'file': 'result.xml',
                'assert': False
            },

            # this test needs a huge improvement
            {
                'namespace': 'run.journal.enabled',
                'value': True,
                'file': '.journal.sqlite',
                'assert': True
            },
        ]
    }

    # HTML result variants only apply when the plugin is installed and
    # its checks were not explicitly disabled.
    if (python_module_available('avocado-framework-plugin-result-html')
            and 'html' not in args.disable_plugin_checks):

        config_check_file_exists['run.dict_variants'].append({
            'namespace': 'job.run.result.html.enabled',
            'value': True,
            'file': 'results.html',
            'assert': True
        })

        config_check_file_exists['run.dict_variants'].append({
            'namespace': 'job.run.result.html.enabled',
            'value': False,
            'file': 'results.html',
            'assert': False
        })

    suites.append(
        TestSuite.from_config(config_check_file_exists,
                              "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if a file was created
    # ========================================================================
    config_check_output_file = {
        'resolver.references':
        get_ref('check_output_file'),
        'run.dict_variants.variant_id_keys': ['namespace', 'file'],
        'run.dict_variants': [
            {
                'namespace': 'job.run.result.json.output',
                'file': 'custom.json',
                'assert': True
            },

            # https://github.com/avocado-framework/avocado/issues/4034
            {
                'namespace': 'job.run.result.tap.output',
                'file': 'custom.tap',
                'assert': True
            },
            {
                'namespace': 'job.run.result.xunit.output',
                'file': 'custom.xml',
                'assert': True
            },
        ]
    }

    if (python_module_available('avocado-framework-plugin-result-html')
            and 'html' not in args.disable_plugin_checks):

        config_check_output_file['run.dict_variants'].append({
            'namespace': 'job.run.result.html.output',
            'file': 'custom.html',
            'assert': True
        })

    suites.append(
        TestSuite.from_config(config_check_output_file,
                              "job-api-%s" % (len(suites) + 1)))

    # ========================================================================
    # Test if the temporary directory was created
    # ========================================================================
    config_check_tmp_directory_exists = {
        'resolver.references':
        get_ref('check_tmp_directory_exists'),
        'run.dict_variants.variant_id_keys': ['namespace', 'value'],
        'run.dict_variants': [
            {
                'namespace': 'run.keep_tmp',
                'value': True,
                'assert': True
            },
        ]
    }

    suites.append(
        TestSuite.from_config(config_check_tmp_directory_exists,
                              "job-api-%s" % (len(suites) + 1)))
    return suites
Example #3
0
    def test_plugin_order(self):
        """
        Tests plugin order by configuration file

        First it checks if html, json, xunit and zip_archive plugins are enabled.
        Then it runs a test with zip_archive running first, which means the html,
        json and xunit output files do not make into the archive.

        Then it runs with zip_archive set to run last, which means the html,
        json and xunit output files *do* make into the archive.
        """
        def run_config(config_path):
            # Run a passing job under the given config and require success.
            cmd = (f"{AVOCADO} --config {config_path} "
                   f"run examples/tests/passtest.py --archive "
                   f"--job-results-dir {self.tmpdir.name} "
                   f"--disable-sysinfo")
            result = process.run(cmd, ignore_status=True)
            expected_rc = exit_codes.AVOCADO_ALL_OK
            self.assertEqual(
                result.exit_status,
                expected_rc,
                (f"Avocado did not return rc {expected_rc}:"
                 f"\n{result}"),
            )

        result_plugins = ["json", "xunit", "zip_archive"]
        result_outputs = ["results.json", "results.xml"]
        if python_module_available("avocado-framework-plugin-result-html"):
            result_plugins.append("html")
            result_outputs.append("results.html")

        # Make sure every result plugin under test is actually enabled.
        cmd_line = f"{AVOCADO} plugins"
        result = process.run(cmd_line, ignore_status=True)
        expected_rc = exit_codes.AVOCADO_ALL_OK
        self.assertEqual(
            result.exit_status,
            expected_rc,
            f"Avocado did not return rc {expected_rc}:\n{result}",
        )
        for result_plugin in result_plugins:
            self.assertIn(result_plugin, result.stdout_text)

        # zip_archive first: the other result files are written after the
        # archive, so none of them may appear inside it.
        config_content_zip_first = "[plugins.result]\norder=['zip_archive']"
        config_zip_first = script.TemporaryScript("zip_first.conf",
                                                  config_content_zip_first)
        with config_zip_first:
            run_config(config_zip_first)
            archives = glob.glob(os.path.join(self.tmpdir.name, "*.zip"))
            self.assertEqual(len(archives), 1, "ZIP Archive not generated")
            # Use a context manager so the archive handle is always closed
            # (previously the open ZipFile was leaked).
            with zipfile.ZipFile(archives[0], "r") as zip_file:
                zip_file_list = zip_file.namelist()
            for result_output in result_outputs:
                self.assertNotIn(result_output, zip_file_list)
            os.unlink(archives[0])

        # zip_archive last: every other result file must be in the archive.
        config_content_zip_last = ("[plugins.result]\norder=['html', 'json',"
                                   "'xunit', 'non_existing_plugin_is_ignored'"
                                   ",'zip_archive']")
        config_zip_last = script.TemporaryScript("zip_last.conf",
                                                 config_content_zip_last)
        with config_zip_last:
            run_config(config_zip_last)
            archives = glob.glob(os.path.join(self.tmpdir.name, "*.zip"))
            self.assertEqual(len(archives), 1, "ZIP Archive not generated")
            with zipfile.ZipFile(archives[0], "r") as zip_file:
                zip_file_list = zip_file.namelist()
            for result_output in result_outputs:
                self.assertIn(result_output, zip_file_list)
Example #4
0
def create_suites(args):  # pylint: disable=W0621
    """Assemble the selftest suites selected by the command line options.

    :param args: parsed command line arguments; must provide the boolean
                 selection attributes used below and ``disable_plugin_checks``.
    :returns: list of :class:`TestSuite` instances.
    """
    suites = []

    # ========================================================================
    # Run nrunner interface checks for all available runners
    # ========================================================================
    # Only the bare "avocado-runner" exits 2 when invoked without args or
    # with a URI only; every other runner is expected to exit 0.
    nrunner_variants = [{
        'runner': 'avocado-runner',
        'runnable-run-no-args-exit-code': 2,
        'runnable-run-uri-only-exit-code': 2
    }]
    for runner_name in ('avocado-runner-noop',
                        'avocado-runner-exec-test',
                        'avocado-runner-python-unittest',
                        'avocado-runner-avocado-instrumented',
                        'avocado-runner-tap'):
        nrunner_variants.append({
            'runner': runner_name,
            'runnable-run-no-args-exit-code': 0,
            'runnable-run-uri-only-exit-code': 0
        })

    config_nrunner_interface = {
        'resolver.references':
        ['selftests/functional/test_nrunner_interface.py'],
        'run.dict_variants.variant_id_keys': ['runner'],
        'run.dict_variants': nrunner_variants,
    }

    # Runners shipped by optional plugins are only checked when the plugin
    # is installed and its checks were not explicitly disabled.
    for plugin_suffix in ('golang', 'robot'):
        plugin_module = 'avocado-framework-plugin-%s' % plugin_suffix
        if (python_module_available(plugin_module)
                and plugin_suffix not in args.disable_plugin_checks):
            nrunner_variants.append({
                'runner': 'avocado-runner-%s' % plugin_suffix,
                'runnable-run-no-args-exit-code': 0,
                'runnable-run-uri-only-exit-code': 0
            })

    if args.nrunner_interface:
        suites.append(
            TestSuite.from_config(config_nrunner_interface,
                                  "nrunner-interface"))

    # ========================================================================
    # Run all static checks, unit and functional tests
    # ========================================================================

    # A single "check" suite aggregates every selected selftest category.
    references = []
    if args.unit:
        references.append('selftests/unit/')
    if args.jobs:
        references.append('selftests/jobs/')
    if args.functional:
        references.append('selftests/functional/')

    if args.static_checks:
        references += glob.glob('selftests/*.sh')

    if args.optional_plugins:
        for optional_plugin in glob.glob('optional_plugins/*'):
            if os.path.basename(optional_plugin) in args.disable_plugin_checks:
                continue
            references += glob.glob('%s/tests/*' % optional_plugin)

    config_check = {
        'resolver.references': references,
        'run.ignore_missing_references': True
    }

    suites.append(TestSuite.from_config(config_check, "check"))

    return suites
Example #5
0
def create_suites(args):  # pylint: disable=W0621
    """Build the list of selftest suites selected by the parsed arguments.

    :param args: parsed command line arguments; must provide ``dict_tests``
                 and ``disable_plugin_checks``.
    :returns: list of :class:`TestSuite` instances ready to be run.
    """
    suites = []

    # ========================================================================
    # Run nrunner interface checks for all available runners
    # ========================================================================
    # Only the bare "avocado-runner" exits 2 for the no-args / URI-only /
    # id-only invocations; every other runner is expected to exit 0.
    interface_variants = [{
        "runner": "avocado-runner",
        "runnable-run-no-args-exit-code": 2,
        "runnable-run-uri-only-exit-code": 2,
        "task-run-id-only-exit-code": 2,
    }]
    for runner_name in ("avocado-runner-dry-run",
                        "avocado-runner-noop",
                        "avocado-runner-exec-test",
                        "avocado-runner-python-unittest",
                        "avocado-runner-avocado-instrumented",
                        "avocado-runner-tap",
                        "avocado-runner-podman-image"):
        interface_variants.append({
            "runner": runner_name,
            "runnable-run-no-args-exit-code": 0,
            "runnable-run-uri-only-exit-code": 0,
            "task-run-id-only-exit-code": 0,
        })

    config_nrunner_interface = {
        "resolver.references":
        ["selftests/functional/test_nrunner_interface.py"],
        "run.dict_variants.variant_id_keys": ["runner"],
        "run.dict_variants": interface_variants,
    }

    # Runners shipped by optional plugins are only checked when the plugin
    # is installed and its checks were not explicitly disabled.
    optional_runners = (("golang", "avocado-runner-golang"),
                        ("robot", "avocado-runner-robot"),
                        ("ansible", "avocado-runner-ansible-module"))
    for plugin_name, runner_name in optional_runners:
        if (python_module_available(f"avocado-framework-plugin-{plugin_name}")
                and plugin_name not in args.disable_plugin_checks):
            interface_variants.append({
                "runner": runner_name,
                "runnable-run-no-args-exit-code": 0,
                "runnable-run-uri-only-exit-code": 0,
                "task-run-id-only-exit-code": 0,
            })

    if args.dict_tests["nrunner-interface"]:
        suites.append(
            TestSuite.from_config(config_nrunner_interface,
                                  "nrunner-interface"))

    # ========================================================================
    # Run functional requirement tests
    # ========================================================================
    config_nrunner_requirement = {
        "resolver.references":
        ["selftests/functional/serial/test_requirements.py"],
        "nrunner.max_parallel_tasks": 1,
        "run.dict_variants": [
            {"spawner": "process"},
            {"spawner": "podman"},
        ],
    }

    if args.dict_tests["nrunner-requirement"]:
        suites.append(
            TestSuite.from_config(config_nrunner_requirement,
                                  "nrunner-requirement"))

    # ========================================================================
    # Run all static checks, unit and functional tests
    # ========================================================================

    # Shared base config; shallow-copied per suite by the helper below.
    config_check = {
        "run.ignore_missing_references": True,
    }

    def make_suite(name, references, max_parallel=None):
        # Derive a suite config from the shared base plus its own references.
        config = copy.copy(config_check)
        config["resolver.references"] = references
        if max_parallel is not None:
            config["nrunner.max_parallel_tasks"] = max_parallel
        return TestSuite.from_config(config, name)

    if args.dict_tests["unit"]:
        suites.append(make_suite("unit", ["selftests/unit/"]))

    if args.dict_tests["jobs"]:
        suites.append(make_suite("jobs", ["selftests/jobs/"]))

    if args.dict_tests["functional"]:
        functional_path = os.path.join("selftests", "functional")
        parallel_refs = glob.glob(os.path.join(functional_path, "test*.py"))
        parallel_refs += [
            os.path.join(functional_path, "utils"),
            os.path.join(functional_path, "plugin"),
        ]
        suites.append(make_suite("functional-parallel", parallel_refs))

        # Serial functional tests must not run alongside anything else.
        suites.append(
            make_suite("functional-serial",
                       ["selftests/functional/serial/"],
                       max_parallel=1))

    if args.dict_tests["static-checks"]:
        suites.append(
            make_suite("static-checks", glob.glob("selftests/*.sh")))

    if args.dict_tests["optional-plugins"]:
        optional_refs = []
        for optional_plugin in glob.glob("optional_plugins/*"):
            if os.path.basename(optional_plugin) in args.disable_plugin_checks:
                continue
            optional_refs += glob.glob(f"{optional_plugin}/tests/*")
        suites.append(make_suite("optional-plugins", optional_refs))

    return suites
Example #6
0
def create_suite_job_api(args):  # pylint: disable=W0621
    """Create the test suites exercising the Job API feature checks.

    Each suite resolves one ``JobAPIFeaturesTest.test_*`` method from this
    very file and runs it against a matrix of configuration variants
    (``run.dict_variants``).  Suites are named ``job-api-<n>`` in creation
    order.

    :param args: parsed command line arguments; ``args.disable_plugin_checks``
                 is consulted to skip variants for disabled optional plugins.
    :returns: a list of ``TestSuite`` objects, one per feature check.
    """
    suites = []

    def get_ref(method_short_name):
        # Build a resolver reference pointing at a test method of this file.
        return [f"{__file__}:JobAPIFeaturesTest.test_{method_short_name}"]

    def add_suite(config):
        # Register a suite under the next sequential "job-api-N" name.
        # Numbering derives from the current suite count, so suites must be
        # added in the intended order.
        suites.append(
            TestSuite.from_config(config, f"job-api-{len(suites) + 1}"))

    # ========================================================================
    # Test if the archive file was created
    # ========================================================================
    config_check_archive_file_exists = {
        "resolver.references":
        get_ref("check_archive_file_exists"),
        "run.dict_variants.variant_id_keys": ["namespace", "value"],
        "run.dict_variants": [
            {
                "namespace": "run.results.archive",
                "value": True,
                "assert": True
            },
        ],
    }

    add_suite(config_check_archive_file_exists)

    # ========================================================================
    # Test if the category directory was created
    # ========================================================================
    config_check_category_directory_exists = {
        "resolver.references":
        get_ref("check_category_directory_exists"),
        "run.dict_variants.variant_id_keys": ["namespace", "value"],
        "run.dict_variants": [
            {
                "namespace": "run.job_category",
                "value": "foo",
                "assert": True
            },
        ],
    }

    add_suite(config_check_category_directory_exists)

    # ========================================================================
    # Test if a directory was created
    # ========================================================================
    config_check_directory_exists = {
        "resolver.references":
        get_ref("check_directory_exists"),
        "run.dict_variants.variant_id_keys": ["namespace", "value"],
        "run.dict_variants": [
            {
                "namespace": "sysinfo.collect.enabled",
                "value": True,
                "directory": "sysinfo",
                "assert": True,
            },
            {
                "namespace": "sysinfo.collect.enabled",
                "value": False,
                "directory": "sysinfo",
                "assert": False,
            },
        ],
    }

    add_suite(config_check_directory_exists)

    # ========================================================================
    # Test the content of a file
    # ========================================================================
    config_check_file_content = {
        "resolver.references":
        get_ref("check_file_content"),
        "run.dict_variants.variant_id_keys": ["namespace", "value", "file"],
        "run.dict_variants": [
            # finding the correct 'content' here is trick because any
            # simple string is added to the variant file name and is
            # found in the log file.
            # Using DEBUG| makes the variant name have DEBUG_, working
            # fine here.
            {
                "namespace": "job.output.loglevel",
                "value": "INFO",
                "file": "job.log",
                "content": r"DEBUG\| Test metadata:$",
                "assert": False,
                "regex": True,
            },
            {
                "namespace": "job.run.result.tap.include_logs",
                "value": True,
                "file": "results.tap",
                "reference": ["examples/tests/passtest.py:PassTest.test"],
                "content": "PASS 1-examples/tests/passtest.py:PassTest.test",
                "assert": True,
            },
            {
                "namespace": "job.run.result.tap.include_logs",
                "value": False,
                "file": "results.tap",
                "content": "Command '/bin/true' finished with 0",
                "assert": False,
            },
            {
                "namespace": "job.run.result.xunit.job_name",
                "value": "foo",
                "file": "results.xml",
                "content": 'name="foo"',
                "assert": True,
            },
            {
                "namespace": "job.run.result.xunit.max_test_log_chars",
                "value": 1,
                "file": "results.xml",
                "content": "--[ CUT DUE TO XML PER TEST LIMIT ]--",
                "assert": True,
                "reference": ["examples/tests/failtest.py:FailTest.test"],
                "exit_code": 1,
            },
            {
                "namespace": "run.failfast",
                "value": True,
                "file": "results.json",
                "content": '"skip": 1',
                "assert": True,
                "reference": ["/bin/false", "/bin/true"],
                "exit_code": 9,
                # failfast only makes sense when tests run one at a time
                "extra_job_config": {
                    "nrunner.max_parallel_tasks": 1
                },
            },
            {
                "namespace": "run.ignore_missing_references",
                "value": "on",
                "file": "results.json",
                "content": '"pass": 1',
                "assert": True,
                "reference": ["/bin/true", "foo"],
            },
            {
                "namespace": "run.unique_job_id",
                "value": "abcdefghi",
                "file": "job.log",
                "content": "Job ID: abcdefghi",
                "assert": True,
            },
            {
                "namespace": "job.run.timeout",
                "value": 1,
                "reference": ["examples/tests/sleeptenmin.py"],
                "file": "job.log",
                "content": "RuntimeError: Test interrupted by SIGTERM",
                "assert": True,
                "exit_code": 8,
            },
        ],
    }

    add_suite(config_check_file_content)

    # ========================================================================
    # Test if the result file was created
    # ========================================================================
    config_check_file_exists = {
        "resolver.references":
        get_ref("check_file_exists"),
        "run.dict_variants.variant_id_keys": ["namespace", "value"],
        "run.dict_variants": [
            {
                "namespace": "job.run.result.json.enabled",
                "value": True,
                "file": "results.json",
                "assert": True,
            },
            {
                "namespace": "job.run.result.json.enabled",
                "value": False,
                "file": "results.json",
                "assert": False,
            },
            {
                "namespace": "job.run.result.tap.enabled",
                "value": True,
                "file": "results.tap",
                "assert": True,
            },
            {
                "namespace": "job.run.result.tap.enabled",
                "value": False,
                "file": "results.tap",
                "assert": False,
            },
            {
                "namespace": "job.run.result.xunit.enabled",
                "value": True,
                "file": "results.xml",
                "assert": True,
            },
            {
                "namespace": "job.run.result.xunit.enabled",
                "value": False,
                "file": "results.xml",
                "assert": False,
            },
            {
                "namespace": "run.dry_run.enabled",
                "value": True,
                "file": "job.log",
                "assert": False,
            },
            {
                "namespace": "run.dry_run.no_cleanup",
                "value": True,
                "file": "job.log",
                "assert": True,
            },
            {
                "namespace": "plugins.disable",
                "value": ["result.xunit"],
                # NOTE(review): "result.xml" (singular) differs from the
                # "results.xml" name used by the xunit variants above;
                # verify this is the intended file name.
                "file": "result.xml",
                "assert": False,
            },
            # this test needs a huge improvement
            {
                "namespace": "run.journal.enabled",
                "value": True,
                "file": ".journal.sqlite",
                "assert": True,
            },
        ],
    }

    # The HTML result variants only apply when the optional plugin is both
    # installed and not explicitly disabled on the command line.
    if (python_module_available("avocado-framework-plugin-result-html")
            and "html" not in args.disable_plugin_checks):

        config_check_file_exists["run.dict_variants"].append({
            "namespace": "job.run.result.html.enabled",
            "value": True,
            "file": "results.html",
            "assert": True,
        })

        config_check_file_exists["run.dict_variants"].append({
            "namespace": "job.run.result.html.enabled",
            "value": False,
            "file": "results.html",
            "assert": False,
        })

    add_suite(config_check_file_exists)

    # ========================================================================
    # Test if a file was created
    # ========================================================================
    config_check_output_file = {
        "resolver.references":
        get_ref("check_output_file"),
        "run.dict_variants.variant_id_keys": ["namespace", "file"],
        "run.dict_variants": [
            {
                "namespace": "job.run.result.json.output",
                "file": "custom.json",
                "assert": True,
            },
            # https://github.com/avocado-framework/avocado/issues/4034
            {
                "namespace": "job.run.result.tap.output",
                "file": "custom.tap",
                "assert": True,
            },
            {
                "namespace": "job.run.result.xunit.output",
                "file": "custom.xml",
                "assert": True,
            },
        ],
    }

    if (python_module_available("avocado-framework-plugin-result-html")
            and "html" not in args.disable_plugin_checks):

        config_check_output_file["run.dict_variants"].append({
            "namespace": "job.run.result.html.output",
            "file": "custom.html",
            "assert": True,
        })

    add_suite(config_check_output_file)

    # ========================================================================
    # Test if the temporary directory was created
    # ========================================================================
    config_check_tmp_directory_exists = {
        "resolver.references":
        get_ref("check_tmp_directory_exists"),
        "run.dict_variants.variant_id_keys": ["namespace", "value"],
        "run.dict_variants": [
            {
                "namespace": "run.keep_tmp",
                "value": True,
                "assert": True
            },
        ],
    }

    add_suite(config_check_tmp_directory_exists)
    return suites