Example no. 1
0
    def __init__(self, monitoring_enabled):
        """Create a LinuxMonitored instance.

        When monitoring is enabled and piglit.conf has a
        [monitored-errors] section, each key in that section names
        another config section describing one rule; its required
        'type', 'regex' and 'parameters' options are read and passed
        to add_rule().
        """
        # Get the monitoring rules from piglit.conf and store them into a dict.
        self._monitoring_rules = {}

        if monitoring_enabled and PIGLIT_CONFIG.has_section('monitored-errors'):
            for key, _ in PIGLIT_CONFIG.items('monitored-errors'):
                if PIGLIT_CONFIG.has_section(key):
                    # Renamed from 'type', which shadowed the builtin.
                    rule_type = PIGLIT_CONFIG.required_get(key, 'type')
                    regex = PIGLIT_CONFIG.required_get(key, 'regex')
                    parameters = PIGLIT_CONFIG.required_get(key, 'parameters')

                    self.add_rule(key, rule_type, parameters, regex)
Example no. 2
0
    def __init__(self, monitoring_enabled):
        """Create a LinuxMonitored instance.

        Loads monitoring rules from the [monitored-errors] section of
        piglit.conf: each key there names a per-rule section whose
        'type', 'regex' and 'parameters' options are forwarded to
        add_rule().
        """
        # Get the monitoring rules from piglit.conf and store them into a dict.
        self._monitoring_rules = {}

        if monitoring_enabled and PIGLIT_CONFIG.has_section('monitored-errors'):
            for key, _ in PIGLIT_CONFIG.items('monitored-errors'):
                if PIGLIT_CONFIG.has_section(key):
                    # Renamed from 'type', which shadowed the builtin.
                    rule_type = PIGLIT_CONFIG.required_get(key, 'type')
                    regex = PIGLIT_CONFIG.required_get(key, 'regex')
                    parameters = PIGLIT_CONFIG.required_get(key, 'parameters')

                    self.add_rule(key, rule_type, parameters, regex)
Example no. 3
0
    def __init__(self, dest, junit_suffix='', **options):
        """Create a JUnit backend writing into *dest*."""
        super(JUnitBackend, self).__init__(dest, **options)
        self._test_suffix = junit_suffix

        # Build lower-cased lookup tables of every test expected to
        # fail or crash, so result writing can match names
        # case-insensitively with a single dict lookup.
        self._expected_failures = {}
        if PIGLIT_CONFIG.has_section("expected-failures"):
            self._expected_failures = {
                name.lower(): True
                for name, _ in PIGLIT_CONFIG.items("expected-failures")}
        self._expected_crashes = {}
        if PIGLIT_CONFIG.has_section("expected-crashes"):
            self._expected_crashes = {
                name.lower(): True
                for name, _ in PIGLIT_CONFIG.items("expected-crashes")}
Example no. 4
0
def add_oclconform_tests(profile):
    """Register OpenCL conformance tests from piglit.conf into *profile*.

    Reads the [oclconform] section: 'bindir' locates the test
    binaries, and every valueless option names a test. Each test has
    its own config section; tests with a 'list_subtests' option have
    their subtests enumerated by running a helper command and matching
    its output lines against 'subtest_regex'.
    """
    section_name = 'oclconform'
    if not PIGLIT_CONFIG.has_section(section_name):
        return

    bindir = PIGLIT_CONFIG.get(section_name, 'bindir')
    options = PIGLIT_CONFIG.options(section_name)

    # Valueless options name tests; valued options (like 'bindir') are
    # configuration, not tests.
    tests = (o for o in options if PIGLIT_CONFIG.get(section_name, o) is None)

    for test in tests:
        test_section_name = get_test_section_name(test)
        if not PIGLIT_CONFIG.has_section(test_section_name):
            print("Warning: no section defined for {}".format(test),
                  file=stderr)
            continue

        test_name = PIGLIT_CONFIG.get(test_section_name, 'test_name')
        should_run_concurrent = PIGLIT_CONFIG.has_option(
            test_section_name, 'concurrent')
        if PIGLIT_CONFIG.has_option(test_section_name, 'list_subtests'):
            # Test with subtests
            list_tests = PIGLIT_CONFIG.get(test_section_name, 'list_subtests')
            subtest_regex = PIGLIT_CONFIG.get(test_section_name,
                                              'subtest_regex')
            # NOTE(review): the original called
            # subtest_regex.encode('string_escape') here and discarded
            # the result; str.encode returns a new object, so that
            # statement was a no-op and has been removed.
            run_subtests = PIGLIT_CONFIG.get(test_section_name, 'run_subtest')
            list_tests = list_tests.split()

            subtests = subprocess.check_output(args=list_tests,
                                               cwd=bindir).split('\n')
            for subtest in subtests:
                m = re.match(subtest_regex, subtest)
                if not m:
                    continue
                subtest = m.group(1)
                # '<subtest>' in the configured command is a placeholder
                # for the discovered subtest name.
                subtest_command = join(
                    bindir, run_subtests.replace('<subtest>', subtest))
                add_sub_test(
                    profile, test_name, subtest,
                    OCLConform(command=subtest_command,
                               run_concurrent=should_run_concurrent))
        else:
            run_test = PIGLIT_CONFIG.get(test_section_name, 'run_test')
            add_test(
                profile, test_name,
                OCLConform(command=run_test,
                           run_concurrent=should_run_concurrent))
def add_oclconform_tests(profile):
    """Register OpenCL conformance tests from piglit.conf into *profile*.

    Reads the [oclconform] section: 'bindir' locates the test
    binaries, and every valueless option names a test. Tests with a
    'list_subtests' option have their subtests enumerated by running a
    helper command and matching its output against 'subtest_regex';
    commands are split into argv lists before being handed to
    OCLConform.
    """
    section_name = 'oclconform'
    if not PIGLIT_CONFIG.has_section(section_name):
        return

    bindir = PIGLIT_CONFIG.get(section_name, 'bindir')
    options = PIGLIT_CONFIG.options(section_name)

    # Valueless options name tests; valued options (like 'bindir') are
    # configuration, not tests.
    tests = (o for o in options if PIGLIT_CONFIG.get(section_name, o) is None)

    for test in tests:
        test_section_name = get_test_section_name(test)
        if not PIGLIT_CONFIG.has_section(test_section_name):
            print("Warning: no section defined for {}".format(test),
                  file=stderr)
            continue

        test_name = PIGLIT_CONFIG.get(test_section_name, 'test_name')
        should_run_concurrent = PIGLIT_CONFIG.has_option(test_section_name,
                                                         'concurrent')
        if PIGLIT_CONFIG.has_option(test_section_name, 'list_subtests'):
            list_tests = PIGLIT_CONFIG.get(test_section_name,
                                           'list_subtests')
            subtest_regex = PIGLIT_CONFIG.get(test_section_name,
                                              'subtest_regex')
            # NOTE(review): the original called
            # subtest_regex.encode('string_escape') here and discarded
            # the result; str.encode returns a new object, so that
            # statement was a no-op and has been removed.
            run_subtests = PIGLIT_CONFIG.get(test_section_name, 'run_subtest')
            list_tests = list_tests.split()

            subtests = subprocess.check_output(args=list_tests,
                                               cwd=bindir).split('\n')
            for subtest in subtests:
                m = re.match(subtest_regex, subtest)
                if not m:
                    continue
                subtest = m.group(1)
                # '<subtest>' in the configured command is a placeholder
                # for the discovered subtest name.
                subtest_command = join(bindir,
                                       run_subtests.replace('<subtest>',
                                                            subtest))
                add_sub_test(profile, test_name, subtest,
                             OCLConform(command=subtest_command.split(),
                                        run_concurrent=should_run_concurrent))
        else:
            run_test = PIGLIT_CONFIG.get(test_section_name, 'run_test')
            add_test(profile, test_name,
                     OCLConform(command=run_test.split(),
                                run_concurrent=should_run_concurrent))
Example no. 6
0
def add_oclconform_tests(profile):
    """Register OpenCL conformance tests from piglit.conf into *profile*.

    Reads the [oclconform] section: 'bindir' locates the test
    binaries, and every valueless option names a test. Each test has
    its own config section; tests with a 'list_subtests' option have
    their subtests enumerated by running a helper command and matching
    its output lines against 'subtest_regex'.
    """
    section_name = "oclconform"
    if not PIGLIT_CONFIG.has_section(section_name):
        return

    bindir = PIGLIT_CONFIG.get(section_name, "bindir")
    options = PIGLIT_CONFIG.options(section_name)

    # Valueless options name tests; valued options (like "bindir") are
    # configuration, not tests.
    tests = (o for o in options if PIGLIT_CONFIG.get(section_name, o) is None)

    for test in tests:
        test_section_name = get_test_section_name(test)
        if not PIGLIT_CONFIG.has_section(test_section_name):
            print("Warning: no section defined for {}".format(test), file=stderr)
            continue

        test_name = PIGLIT_CONFIG.get(test_section_name, "test_name")
        should_run_concurrent = PIGLIT_CONFIG.has_option(test_section_name, "concurrent")
        if PIGLIT_CONFIG.has_option(test_section_name, "list_subtests"):
            # Test with subtests
            list_tests = PIGLIT_CONFIG.get(test_section_name, "list_subtests")
            subtest_regex = PIGLIT_CONFIG.get(test_section_name, "subtest_regex")
            # NOTE(review): the original called
            # subtest_regex.encode("string_escape") here and discarded the
            # result; str.encode returns a new object, so that statement
            # was a no-op and has been removed.
            run_subtests = PIGLIT_CONFIG.get(test_section_name, "run_subtest")
            list_tests = list_tests.split()

            subtests = subprocess.check_output(args=list_tests, cwd=bindir).split("\n")
            for subtest in subtests:
                m = re.match(subtest_regex, subtest)
                if not m:
                    continue
                subtest = m.group(1)
                # "<subtest>" in the configured command is a placeholder for
                # the discovered subtest name.
                subtest_command = join(bindir, run_subtests.replace("<subtest>", subtest))
                add_sub_test(
                    profile,
                    test_name,
                    subtest,
                    OCLConform(command=subtest_command, run_concurrent=should_run_concurrent),
                )
        else:
            run_test = PIGLIT_CONFIG.get(test_section_name, "run_test")
            add_test(profile, test_name, OCLConform(command=run_test, run_concurrent=should_run_concurrent))
Example no. 7
0
    def __init__(self, dest, junit_suffix='', junit_subtests=False, **options):
        """Create a JUnit backend, picking a writer implementation.

        junit_subtests selects JUnitSubtestWriter over the plain
        JUnitWriter.
        """
        super(JUnitBackend, self).__init__(dest, **options)

        # Lower-cased lookup tables of every test expected to fail or
        # crash, so result writing can match names case-insensitively.
        expected_failures = {}
        if PIGLIT_CONFIG.has_section("expected-failures"):
            expected_failures = {
                name.lower(): True
                for name, _ in PIGLIT_CONFIG.items("expected-failures")}
        expected_crashes = {}
        if PIGLIT_CONFIG.has_section("expected-crashes"):
            expected_crashes = {
                name.lower(): True
                for name, _ in PIGLIT_CONFIG.items("expected-crashes")}

        if junit_subtests:
            self._write = JUnitSubtestWriter(  # pylint: disable=redefined-variable-type
                junit_suffix, expected_failures, expected_crashes)
        else:
            self._write = JUnitWriter(junit_suffix, expected_failures,
                                      expected_crashes)
Example no. 8
0
    def __init__(self, dest, metadata, **options):
        """Open results.xml inside *dest* and write the XML prologue.

        metadata must provide 'test_count' and 'test_suffix'.
        """
        self._file = open(os.path.join(dest, 'results.xml'), 'w')
        FSyncMixin.__init__(self, **options)

        # Lower-cased lookup tables of every test expected to fail or
        # crash, so result writing can match names case-insensitively.
        self._expected_failures = {}
        if PIGLIT_CONFIG.has_section("expected-failures"):
            for name, _ in PIGLIT_CONFIG.items("expected-failures"):
                self._expected_failures[name.lower()] = True
        self._expected_crashes = {}
        if PIGLIT_CONFIG.has_section("expected-crashes"):
            for name, _ in PIGLIT_CONFIG.items("expected-crashes"):
                self._expected_crashes[name.lower()] = True

        # Initial headers and other data that etree cannot write for us,
        # emitted verbatim in one write.
        prologue = ('<?xml version="1.0" encoding="UTF-8" ?>\n'
                    '<testsuites>\n'
                    '<testsuite name="piglit" tests="{}">\n'.format(
                        metadata['test_count']))
        self._file.write(prologue)
        self._test_suffix = metadata["test_suffix"]
Example no. 9
0
def test_get_option_conf_no_section():
    """deqp.get_option: returns None when the config section is absent
    and the environment variable is unset.
    """
    # Precondition: the section really is missing from piglit.conf.
    assert not PIGLIT_CONFIG.has_section('deqp_test')
    actual = deqp.get_option('_PIGLIT_TEST_ENV', ('deqp_test', 'test_env'))
    nt.eq_(actual, None)