Example #1
def GenerateTestcases(
  generator_config: generator_pb2.ClgenGenerator,
  output_directory: pathlib.Path,
  num_testcases: int,
) -> None:
  app.Log(1, "Writing output to %s", output_directory)
  (output_directory / "generated_kernels").mkdir(parents=True, exist_ok=True)
  (output_directory / "generated_testcases").mkdir(parents=True, exist_ok=True)

  app.Log(1, "Preparing test case generator.")
  generator = clgen.ClgenGenerator(generator_config)

  # Generate testcases.
  app.Log(1, "Generating %d testcases ...", num_testcases)
  req = generator_pb2.GenerateTestcasesRequest()
  req.num_testcases = num_testcases
  res = generator.GenerateTestcases(req, None)

  for testcase in res.testcases:
    # Write kernel to file.
    kernel = testcase.inputs["src"]
    kernel_id = crypto.md5_str(kernel)
    with open(
      output_directory / "generated_kernels" / f"{kernel_id}.cl", "w"
    ) as f:
      f.write(kernel)

    # Write testcase to file.
    testcase_id = crypto.md5_str(str(testcase))
    pbutil.ToFile(
      testcase,
      output_directory / "generated_testcases" / f"{testcase_id}.pbtxt",
    )

  app.Log(
    1,
    "%d testcases written to %s",
    num_testcases,
    output_directory / "generated_testcases",
  )
  generation_times = [
    testcase.profiling_events[0].duration_ms for testcase in res.testcases
  ]
  app.Log(
    1,
    "Average time to generate testcase: %.2f ms",
    sum(generation_times) / len(generation_times),
  )
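
The function above relies on module-level imports from its source file. A minimal driver might look like the sketch below; the import paths and the "generator.pbtxt" config path are assumptions for illustration, not taken from the example.

# Sketch of a driver for GenerateTestcases(). Import paths are assumed from
# the labm8 / deepsmith layout; "generator.pbtxt" is a hypothetical config.
import pathlib

from labm8.py import pbutil
from deeplearning.deepsmith.proto import generator_pb2

config = pbutil.FromFile(
  pathlib.Path("generator.pbtxt"), generator_pb2.ClgenGenerator()
)
GenerateTestcases(config, pathlib.Path("/tmp/deepsmith"), num_testcases=100)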
Example #2
def test_ClsmithGenerator_GenerateTestcases(
    abc_generator: clsmith.ClsmithGenerator, ):
    """End-to-end test of testcase generation."""
    req = generator_pb2.GenerateTestcasesRequest(num_testcases=10)
    res = abc_generator.GenerateTestcases(req, None)
    assert len(res.testcases) == 10
    for i in range(0, 10, 2):
        assert res.testcases[i].inputs["gsize"] == "1,1,1"
        assert res.testcases[i + 1].inputs["gsize"] == "128,16,1"
        assert res.testcases[i].inputs["lsize"] == "1,1,1"
        assert res.testcases[i + 1].inputs["lsize"] == "8,4,1"
        assert (res.testcases[i].inputs["src"] ==
                res.testcases[i + 1].inputs["src"])
    # CLSmith is unlikely to generate the same program five times over, but
    # this assertion is technically flaky.
    assert (res.testcases[0].inputs["src"] != res.testcases[2].inputs["src"] !=
            res.testcases[4].inputs["src"] != res.testcases[6].inputs["src"] !=
            res.testcases[8].inputs["src"])
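
Note that a chained != only compares adjacent operands, so the final assertion never compares, say, testcases[0] against testcases[4]. A stricter variant (a sketch, not part of the original test) collects the five sources in a set and checks that they are pairwise distinct:

# A set deduplicates, so this fails if any two of the five programs match,
# including pairs the chained != above would miss.
srcs = {res.testcases[i].inputs["src"] for i in range(0, 10, 2)}
assert len(srcs) == 5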
Example #3
def GenerateTestcases(generator_config: generator_pb2.ClgenGenerator,
                      output_directory: pathlib.Path,
                      num_testcases: int) -> None:
    logging.info('Writing output to %s', output_directory)
    (output_directory / 'generated_kernels').mkdir(parents=True, exist_ok=True)
    (output_directory / 'generated_testcases').mkdir(parents=True,
                                                     exist_ok=True)

    logging.info('Preparing test case generator.')
    generator = clgen.ClgenGenerator(generator_config)

    # Generate testcases.
    logging.info('Generating %d testcases ...', num_testcases)
    req = generator_pb2.GenerateTestcasesRequest()
    req.num_testcases = num_testcases
    res = generator.GenerateTestcases(req, None)

    for testcase in res.testcases:
        # Write kernel to file.
        kernel = testcase.inputs['src']
        kernel_id = crypto.md5_str(kernel)
        with open(output_directory / 'generated_kernels' / f'{kernel_id}.cl',
                  'w') as f:
            f.write(kernel)

        # Write testcase to file.
        testcase_id = crypto.md5_str(str(testcase))
        pbutil.ToFile(
            testcase,
            output_directory / 'generated_testcases' / f'{testcase_id}.pbtxt')

    logging.info('%d testcases written to %s', num_testcases,
                 output_directory / 'generated_testcases')
    generation_times = [
        testcase.profiling_events[0].duration_ms for testcase in res.testcases
    ]
    logging.info('Average time to generate testcase: %.2f ms',
                 sum(generation_times) / len(generation_times))
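
One caveat in both versions of this function: the closing average divides by len(generation_times), so calling with num_testcases=0 raises a ZeroDivisionError. A guarded variant of the final log call (a sketch, not from the original) would be:

if generation_times:
    logging.info('Average time to generate testcase: %.2f ms',
                 sum(generation_times) / len(generation_times))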
Example #4
def RunBatch(
  generator: base_generator.GeneratorServiceBase,
  dut_harness: base_harness.HarnessBase,
  gs_harness: base_harness.HarnessBase,
  filters: difftests.FiltersBase,
  batch_size: int,
) -> typing.List[deepsmith_pb2.Result]:
  """Run one batch of testing.

  A batch of testing involves generating a set of testcases, executing them on
  the device under test, then determining for each whether the result is
  interesting. The interestingness test may involve executing testcases on the
  gold-standard device and comparing the outputs.

  Args:
    generator: The generator for testcases.
    dut_harness: The device under test.
    gs_harness: The gold-standard device, used to compare outputs against the
      device under test.
    filters: A testcase filters instance.
    batch_size: The number of testcases to generate and evaluate.

  Returns:
    A list of results which are determined to be interesting.
  """
  # Our differential testers and result filters.
  unary_difftester = difftests.UnaryTester()
  gs_difftester = difftests.GoldStandardDiffTester(
    difftests.NamedOutputIsEqual("stdout")
  )

  interesting_results = []

  # Generate testcases.
  app.Log(1, "Generating %d testcases ...", batch_size)
  req = generator_pb2.GenerateTestcasesRequest()
  req.num_testcases = batch_size
  res = generator.GenerateTestcases(req, None)
  testcases = [
    testcase for testcase in res.testcases if filters.PreExec(testcase)
  ]
  if len(res.testcases) - len(testcases):
    app.Log(
      1,
      "Discarded %d testcases prior to execution.",
      len(res.testcases) - len(testcases),
    )

  # Evaluate testcases.
  app.Log(
    1,
    "Evaluating %d testcases on %s ...",
    len(testcases),
    dut_harness.testbeds[0].opts["platform"][:12],
  )
  unfiltered_results = RunTestcases(dut_harness, testcases)
  results = [
    result for result in unfiltered_results if filters.PostExec(result)
  ]
  if len(unfiltered_results) - len(results):
    app.Log(1, "Discarded %d results.", len(unfiltered_results) - len(results))

  for i, result in enumerate(results):
    interesting_result = ResultIsInteresting(
      result, unary_difftester, gs_difftester, gs_harness, filters
    )
    if interesting_result:
      interesting_results.append(interesting_result)

  return interesting_results
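
RunBatch is designed to be called repeatedly. A minimal sketch of such a loop follows; constructing the generator, harnesses, and filters is project-specific and elided here, so those names are assumptions carried over from the example's signature.

# Sketch of a testing loop over RunBatch. generator, dut_harness, gs_harness,
# and filters are assumed to be constructed elsewhere, as in the example.
interesting_results = []
for batch_num in range(100):
  app.Log(1, "Batch %d ...", batch_num)
  interesting_results += RunBatch(
    generator, dut_harness, gs_harness, filters, batch_size=128
  )
app.Log(1, "Collected %d interesting results.", len(interesting_results))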