Example #1
# Imports assumed, not shown in the original snippet: Expectation and
# TaggedTestListParser come from typ (Chromium's test-runner library);
# `conditions`, `Condition`, and `condition_to_tags` are local helpers.
from typing import Optional

from typ.expectations_parser import Expectation, TaggedTestListParser


def disabler(full_test_name: str, source_file: str,
             new_cond: Condition) -> str:
    """Returns `source_file` updated so that `full_test_name` is skipped
    under the merge of its existing condition with `new_cond`."""
    existing_expectation: Optional[Expectation] = None
    condition = conditions.NEVER
    # Look for an existing expectation for this exact test. An unconditional
    # SKIP/FAIL line means the test is already disabled everywhere.
    for expectation in TaggedTestListParser(source_file).expectations:
        if expectation.test == full_test_name:
            existing_expectation = expectation
            if set(expectation.results) & {'SKIP', 'FAIL'}:
                tags = set(expectation.tags)
                if not tags:
                    condition = conditions.ALWAYS
            break

    merged = conditions.merge(condition, new_cond)

    # No existing line for this test: append a fresh SKIP expectation,
    # separated from the rest of the file by a blank line.
    if existing_expectation is None:
        ex = Expectation(test=full_test_name,
                         results=['SKIP'],
                         tags=condition_to_tags(merged))

        while not source_file.endswith('\n\n'):
            source_file += '\n'
        source_file += ex.to_string()
        return source_file

    # Otherwise rewrite the existing line in place, preserving its reason
    # and trailing comments.
    new_expectation = Expectation(
        reason=existing_expectation.reason,
        test=existing_expectation.test,
        trailing_comments=existing_expectation.trailing_comments,
        results=['SKIP'],
        tags=condition_to_tags(merged),
    )

    lines = source_file.split('\n')
    # Minus 1 as 'lineno' is 1-based.
    lines[existing_expectation.lineno - 1] = new_expectation.to_string()
    return '\n'.join(lines)
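
A minimal usage sketch. The file contents and test name below are invented for illustration; the header follows typ's tagged test list format, and conditions.ALWAYS is reused from the snippet above (assuming the usual semantics of conditions.merge).

# Hedged sketch: contents and test name are invented for illustration.
contents = ('# tags: [ mac win linux ]\n'
            '# results: [ Failure Skip ]\n'
            '\n'
            'crbug.com/1234 external/wpt/foo.html [ Failure ]\n')

# The existing unconditional [ Failure ] line is rewritten in place as an
# unconditional [ Skip ].
updated = disabler('external/wpt/foo.html', contents, conditions.ALWAYS)
print(updated)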
Example #2
def expectations_for(self, test_case):
    """Returns the Expectation for `test_case`, or a default one."""
    test_name = test_case.id()[len(self.args.test_name_prefix):]
    if self.has_expectations:
        return self.expectations.expectations_for(test_name)
    return Expectation(test=test_name)
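
The fallback path matters when no expectations file was loaded: every test then gets a bare Expectation. A small sketch of that default, assuming typ's Expectation fills in a passing result when none is given.

from typ.expectations_parser import Expectation

# No results given: typ fills in a default passing result, so the runner
# expects the test to pass and attaches no tags (an assumption about typ's
# defaults, hedged here rather than guaranteed).
exp = Expectation(test='suite.TestCase.test_foo')
print(exp.test)     # suite.TestCase.test_foo
print(exp.results)  # the default result set (PASS)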
Example #3
# Imports assumed, not shown in the original snippet: json is from the
# standard library; the rest come from typ (Chromium's test-runner library).
import json

from typ.expectations_parser import Expectation, TaggedTestListParser
from typ.json_results import ResultType


def split_cts_expectations_and_web_test_expectations(
        expectations_file_contents, platform_tags=None):
    """Split web test expectations (bit.ly/chromium-test-list-format) into a Javascript
    module containing expectations for the WebGPU CTS, and a filtered list of the same web
    test expectations, excluding the bits handled by the WebGPU CTS. Returns an object:
    {
      cts_expectations_js: "export const expectations = [ ... ]",
      web_test_expectations: {
        expectations: <expectations contents>
        tag_set: <frozenset of tags used by the expectations>
        result_set: <frozenset of result tags used by the expectations>
      }
    }"""

    cts_expectations = []

    out_tag_set = set()
    out_result_set = set()
    out_expectations = []

    parser = TaggedTestListParser(expectations_file_contents)

    # For each expectation, append it to |cts_expectations| if the CTS can
    # understand it. Expectations the CTS does not support are forwarded to
    # the web test harness. This preserves expectations like
    # [ Slow Crash Timeout RetryOnFailure ], as well as [ Pass ] expectations
    # used for test splitting.
    # TODO(crbug.com/1186320): Handle test splits / variant generation
    # separately? Web test expectations that are passed through run as
    # separate variants. Since [ Slow Crash Timeout RetryOnFailure Pass ] are
    # web test expectations, they must be a prefix of the test name; if they
    # match nothing, the variant generator will warn.
    # TODO(crbug.com/1186320): Also validate the CTS expectation query.
    # TODO(crbug.com/1186320): We may be able to use skip expectations in the
    # CTS for Crash/Timeout, with a separate test suite that runs only the
    # problematic tests. Generating variants specifically for each
    # expectation would avoid the prefix problem, allowing exact test
    # suppressions at the cost of potentially running some tests multiple
    # times when expectations overlap.
    for exp in parser.expectations:
        # Skip expectations that are not relevant to this platform
        if platform_tags is not None and not exp.tags.issubset(platform_tags):
            continue

        results = exp.results
        raw_results = exp.raw_results

        # Skip special handling for expectations that aren't for the CTS,
        # e.g. ref tests, which run in WPT without the CTS.
        # TODO(crbug.com/1186320): This could be a more robust check.
        if 'q=webgpu:' in exp.test:
            # Pass Skip expectations to the CTS.
            if ResultType.Skip in results:
                assert len(results) == 1, (
                    'Skip expectations must not be combined with other '
                    'expectations')
                cts_expectations.append({
                    'query': exp.test,
                    'expectation': 'skip'
                })
                continue

            # Consume the [ Failure ] expectation for the CTS, but forward
            # other expectations along. [ Pass Crash Timeout ] will impact
            # variant generation.
            # TODO(crbug.com/1186320): Teach the CTS RetryOnFailure.
            if ResultType.Failure in results and not exp.should_retry_on_failure:
                cts_expectations.append({
                    'query': exp.test,
                    'expectation': 'fail'
                })

                results = results.difference({ResultType.Failure})
                # Materialize the filtered results into a list; filter()
                # returns a lazy iterator in Python 3, which would break the
                # length check below.
                raw_results = [r for r in raw_results if r != 'Failure']

        if raw_results:
            # Forward everything, with the modified results.
            out_exp = Expectation(reason=exp.reason,
                                  test=exp.test,
                                  results=results,
                                  lineno=exp.lineno,
                                  retry_on_failure=exp.should_retry_on_failure,
                                  is_slow_test=exp.is_slow_test,
                                  conflict_resolution=exp.conflict_resolution,
                                  raw_tags=exp.raw_tags,
                                  raw_results=raw_results,
                                  is_glob=exp.is_glob,
                                  trailing_comments=exp.trailing_comments)

            out_expectations.append(out_exp)

            # Add the results and tags the expectation uses to sets.
            # We will prepend these to the top of the out file.
            out_result_set = out_result_set.union(out_exp.raw_results)
            out_tag_set = out_tag_set.union(out_exp.raw_tags)

    return {
        'cts_expectations_js':
        'export const expectations = ' + json.dumps(cts_expectations),
        'web_test_expectations': {
            'expectations': out_expectations,
            'tag_set': out_tag_set,
            'result_set': out_result_set
        }
    }
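
A minimal sketch of driving the splitter. The file contents, CTS queries, and platform tags are invented for illustration; the header follows the typ tagged-list format the parser above expects.

# Hedged sketch: both lines target the CTS, one unconditionally failing and
# one skipped on mac only.
contents = '\n'.join([
    '# tags: [ mac win ]',
    '# results: [ Failure Skip ]',
    '',
    'crbug.com/1 wpt_internal/webgpu/cts.https.html?q=webgpu:api,foo:* '
    '[ Failure ]',
    'crbug.com/2 [ mac ] wpt_internal/webgpu/cts.https.html?q=webgpu:api,bar:* '
    '[ Skip ]',
])

split = split_cts_expectations_and_web_test_expectations(
    contents, platform_tags=frozenset(['mac']))

# Both lines are understood by the CTS: the [ Failure ] becomes a 'fail'
# entry and the [ Skip ] a 'skip' entry, leaving nothing to forward to the
# web test harness.
print(split['cts_expectations_js'])
print(split['web_test_expectations']['expectations'])  # []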