def test_read_then_write(self):
    p = package.Package('the_package')

    temp_dir = tempfile.TemporaryDirectory()
    results_path = os.path.join(temp_dir.name, 'results.textproto')
    results = lec_characterizer_pb2.LecTiming()
    results.ir_function = 'single_op_OP_ADD'

    # Add one untouched test case, and one that should be appended to.
    proto_byte = xls_type_pb2.TypeProto()
    proto_byte.type_enum = xls_type_pb2.TypeProto.BITS
    proto_byte.bit_count = 8
    proto_short = xls_type_pb2.TypeProto()
    proto_short.type_enum = xls_type_pb2.TypeProto.BITS
    proto_short.bit_count = 16

    test_case = results.test_cases.add()
    param = test_case.function_type.parameters.add()
    param.CopyFrom(proto_short)
    param = test_case.function_type.parameters.add()
    param.CopyFrom(proto_short)
    test_case.function_type.return_type.CopyFrom(proto_short)

    test_case = results.test_cases.add()
    param = test_case.function_type.parameters.add()
    param.CopyFrom(proto_byte)
    param = test_case.function_type.parameters.add()
    param.CopyFrom(proto_byte)
    test_case.function_type.return_type.CopyFrom(proto_byte)
    test_case.exec_times_us.extend([1, 3, 7])
    test_case.average_us = 3

    with gfile.open(results_path, 'w') as f:
      f.write(text_format.MessageToString(results))

    num_iters = 16
    byte_type = p.get_bits_type(8)
    self.lc.run(
        op=op_pb2.OpProto.OP_ADD,
        samples=[([byte_type, byte_type], byte_type)],
        num_iters=num_iters,
        cell_library_textproto=self.cell_lib_text,
        results_path=results_path,
        lec_fn=lambda a, b, c, d: True)

    results = lec_characterizer_pb2.LecTiming()
    with gfile.open(results_path, 'r') as f:
      text_format.Parse(f.read(), results)

    self.assertEqual(results.ir_function, 'single_op_OP_ADD')
    self.assertLen(results.test_cases, 2)
    for test_case in results.test_cases:
      if test_case.function_type.return_type.bit_count == 16:
        self.assertEmpty(test_case.exec_times_us)
      else:
        self.assertLen(test_case.exec_times_us, 3 + num_iters)
def csv_solvers_speeds_bits_list(op, nests_val, bits_list, solvers, fname):
  """Create and test smt2 files for each proof and write the data to a csv file.

  Create smt2 files for each of the bitvector lengths in bits_list with the
  given operation and number of nested operations, get the average speeds of
  each of the solvers on these proofs, and store the data in the CSV file
  named fname.

  Args:
    op: A string, the operation to test
    nests_val: An integer, the number of nested operations
    bits_list: A list of integers, the input bitvector length for each proof
    solvers: A list of strings, the solvers to test
    fname: The name of the file to store the data in
  """
  files = create_and_get_smt_files_bits_list(op, nests_val, bits_list)
  solvers_milliseconds = solvers_op_comparison_functions.get_solver_speeds_ms(
      solvers, files)
  write_row = not os.path.isfile(fname)
  with gfile.open(fname, "a") as f:
    wr = csv.writer(f, delimiter=",")
    if write_row:
      wr.writerow(["bits_list"] + bits_list)
    for i in range(len(solvers)):
      wr.writerow([solvers[i]] + solvers_milliseconds[i])
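# A minimal usage sketch for the writer above; the solver names and output
# filename are illustrative assumptions, not values from this codebase. Each
# call appends one row per solver, and the header row is written only when
# fname does not already exist.
csv_solvers_speeds_bits_list(
    op="add", nests_val=1, bits_list=[4, 8, 16, 32],
    solvers=["z3", "cvc4"], fname="add_speeds.csv")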
def record_crasher(workerno: int, sampleno: int, minimize_ir: bool,
                   sample: Sample, run_dir: Text, crash_path: Text,
                   num_crashers: int, error_message: str):
  """Records and writes details of a failing test as a crasher."""
  print('--- Worker {} observed an exception for sampleno {}'.format(
      workerno, sampleno))

  # Try to prune down the IR to a minimal reproducer.
  if minimize_ir:
    print('--- Worker {} attempting to minimize IR'.format(workerno))
    minimized_ir_path = run_fuzz.minimize_ir(sample, run_dir)
    if minimized_ir_path:
      print('--- Worker {} minimized IR saved in {}'.format(
          workerno, os.path.basename(minimized_ir_path)))
    else:
      print('--- Worker {} unable to minimize IR'.format(workerno))

  # Create a directory under crash_path containing the entire contents of
  # the run directory along with a crasher file. Name of directory is the
  # first eight characters of the hash of the code sample.
  digest = hashlib.sha256(sample.input_text.encode('utf-8')).hexdigest()[:8]
  sample_crasher_dir = os.path.join(crash_path, digest)
  termcolor.cprint(
      '--- Worker {} noted crasher #{} for sampleno {} in {}'.format(
          workerno, num_crashers, sampleno, sample_crasher_dir),
      color='red')
  sys.stdout.flush()
  gfile.recursively_copy_dir(
      run_dir, sample_crasher_dir, preserve_file_mask=True)
  crasher_path = os.path.join(
      sample_crasher_dir,
      'crasher_{}_{}.x'.format(datetime.date.today().strftime('%Y-%m-%d'),
                               digest[:4]))
  with gfile.open(crasher_path, 'w') as f:
    f.write(sample.to_crasher(error_message))
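# The crasher directory name above is the first eight hex digits of the
# SHA-256 of the sample text. A standalone sketch of the same derivation
# (the input string is an illustrative assumption):
import hashlib

sample_text = 'fn main() -> u32 { u32:0 }'  # hypothetical code sample
digest = hashlib.sha256(sample_text.encode('utf-8')).hexdigest()[:8]
# digest is a deterministic eight-character id for this exact sample text.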
def main(argv):
    if len(argv) != 2:
        raise app.UsageError(
            'Invalid command-line arguments; want {} <crasher path>'.format(
                argv[0]))

    with gfile.open(argv[1], 'r') as f:
        smp = sample.Sample.from_crasher(f.read())
    if FLAGS.simulator:
        smp = smp._replace(options=smp.options._replace(
            simulator=FLAGS.simulator))

    run_dir = FLAGS.run_dir if FLAGS.run_dir else tempfile.mkdtemp(
        'run_crasher_')

    print(f'Running crasher in directory {run_dir}')
    try:
        run_fuzz.run_sample(smp, run_dir)
    except sample_runner.SampleError:
        print('FAILURE')
        return 1

    print('SUCCESS')
    if not FLAGS.run_dir:
        # Remove the directory if it is temporary.
        shutil.rmtree(run_dir)
    return 0
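# Hypothetical invocation of this entry point; the binary name, flag value,
# and crasher path are assumptions based on the flags read above:
#   run_crasher --simulator=iverilog /path/to/crasher_2020-01-01_abcd.x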
def create_and_get_smt_files_nests_list(op, nests_list, bits_val):
  """Creates SMTLIB2 files for the necessary proof and return them in a list.

  Given an operation, a list of nest values, and the number of bits, create smt2
  files for each proof, and return them in a list.

  Args:
    op: A string, the operation to test ('add', 'mul', or 'shl').
    nests_list: A list of integers, the number of nested operations for each
      proof.
    bits_val: An integer, the input bitvector length.

  Returns:
    The list of generated SMTLIB2 files.
  """
  if op not in ['add', 'mul', 'shl']:
    raise ValueError('op argument is not a valid operation')
  files = []
  for nest in nests_list:
    with gfile.open(f'{op}{nest}_2x{bits_val}.smt2', 'w+') as f:
      files.append(f)
      if op == 'add':
        n_bit_nested_add_generator.n_bit_nested_add_existing_file(
            bits_val, nest, f)
      elif op == 'mul':
        n_bit_nested_mul_generator.n_bit_nested_mul_existing_file(
            bits_val, nest, f)
      elif op == 'shl':
        n_bit_nested_shift_generator.n_bit_nested_shift_existing_file(
            bits_val, nest, f)
  return files
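# Usage sketch for the helper above (operation, nest counts, and bit width
# are illustrative assumptions). This writes add2_2x8.smt2 and add4_2x8.smt2
# to the working directory and returns the file objects in creation order.
smt_files = create_and_get_smt_files_nests_list('add', [2, 4], 8)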
  def test_lec_smoke(self):
    p = package.Package('the_package')

    temp_dir = tempfile.TemporaryDirectory()
    results_path = os.path.join(temp_dir.name, 'results.textproto')

    num_iters = 16

    byte_type = p.get_bits_type(8)
    self.lc.run(
        op=op_pb2.OpProto.OP_ADD,
        samples=[([byte_type, byte_type], byte_type)],
        num_iters=num_iters,
        cell_library_textproto=self.cell_lib_text,
        results_path=results_path,
        lec_fn=lambda a, b, c, d: True)

    # Open results, verify contents
    results = lec_characterizer_pb2.LecTiming()
    with gfile.open(results_path, 'r') as f:
      text_format.Parse(f.read(), results)

    self.assertEqual(results.ir_function, 'single_op_OP_ADD')
    self.assertLen(results.test_cases, 1)
    test_case = results.test_cases[0]
    self.assertLen(test_case.exec_times_us, num_iters)
def plot_csv_data(fname):
    """Plots the contents of the csv file fname.

  Given a csv file, takes the first row as the values for the x-axis, and each
  of the following rows as data for a solver. Assumes the first element in each
  row is the name of the solver.

  Args:
    fname: The name of the file containing the data to plot.
  """
    with gfile.open(fname, 'r') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count == 0:
                x_axis = [int(elm) for elm in row[1:]]
                if row[0] == 'bits_list':
                    xlabel = 'Bit Count'
                elif row[0] == 'nests_list':
                    xlabel = 'Nested Ops'
            else:
                data = [float(elm) for elm in row[1:]]
                label = row[0]
                plt.scatter(x_axis, data, label=label)
                print(f'row[1:] = {row[1:]}, label = {row[0]}')
            line_count += 1
        if FLAGS.xscale == 'log':
            plt.xscale('log', basex=2)
        plt.xlabel(xlabel)
        if FLAGS.yscale == 'log':
            plt.yscale('log', basey=10)
        plt.ylabel('Solve Time (ms)')
        plt.legend()
        plt.show()
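# The reader above expects a layout like the following (contents are
# illustrative): the first row holds the x-axis values, and each later row is
# one solver's timings in milliseconds, keyed by solver name.
#
#   bits_list,4,8,16,32
#   z3,1.2,3.4,9.8,31.0
#   cvc4,2.1,5.5,14.2,40.7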
def csv_solvers_speeds_nests_list(op, nests_list, bits_val, solvers, fname):
  """Creates and tests SMTLIB2 files for each proof and writes the result.

  Create smt2 files for each of the nest values in nests_list with the
  given operation and the input bitvector length, get the average speeds of
  each of the solvers on these proofs, and store the data in the CSV file
  named fname.

  Args:
    op: A string, the operation to test.
    nests_list: A list of integers, the number of nested operations for each
      proof.
    bits_val: An integer, the input bitvector length.
    solvers: A list of strings, the solvers to test.
    fname: The name of the file to store the data in.
  """
  files = create_and_get_smt_files_nests_list(op, nests_list, bits_val)
  solvers_milliseconds = solvers_op_comparison_functions.get_solver_speeds_ms(
      solvers, files)
  write_row = not os.path.isfile(fname)
  with gfile.open(fname, 'a') as f:
    wr = csv.writer(f, delimiter=',')
    if write_row:
      wr.writerow(['nests_list'] + nests_list)
    for i in range(len(solvers)):
      wr.writerow([solvers[i]] + solvers_milliseconds[i])
def create_and_get_smt_files_bits_list(op, nests_val, bits_list):
  """Creates smt2 files for the necessary proof and return them in a list.

  Given an operation, the number of nests, and a list of bits, create SMTLIB2
  files for each proof, and return them in a list.

  Args:
    op: A string, the operation to test ('add', 'mul', or 'shl')
    nests_val: An integer, the number of nested operations
    bits_list: A list of integers, the input bitvector length for each proof

  Returns:
    The generated SMTLIB2 files.
  """
  if op not in ["add", "mul", "shl"]:
    raise ValueError("op argument is not a valid operation")
  files = []
  for bits in bits_list:
    with gfile.open(f"{op}{nests_val}_2x{bits}.smt2", "w+") as f:
      files.append(f)
      if op == "add":
        n_bit_nested_add_generator.n_bit_nested_add_existing_file(
            bits, nests_val, f)
      elif op == "mul":
        n_bit_nested_mul_generator.n_bit_nested_mul_existing_file(
            bits, nests_val, f)
      elif op == "shl":
        n_bit_nested_shift_generator.n_bit_nested_shift_existing_file(
            bits, nests_val, f)
  return files
def n_bit_mul_new_file(n):
    """Create a new file, and write a multiplication proof with n-bit arguments.

  Args:
    n: An integer, the number of bits for the input and output bitvectors.
  """
    with gfile.open(f"mul_2x{n}.smt2", "w+") as f:
        n_bit_mul_existing_file(n, f)
def n_bit_nested_mul_new_file(n, muls):
    """Makes a new file and write an n-bit multiplication [chain] proof.

  Args:
    n: An integer, the number of bits in each bitvector.
    muls: An integer, the number of nested multiplication operations.
  """
    with gfile.open(f"mul{muls}_2x{n}.smt2", "w+") as f:
        n_bit_nested_mul_existing_file(n, muls, f)
def n_bit_nested_add_new_file(n, adders):
    """Make a new file and write an n-bit addition proof.

  Args:
    n: An integer, the number of bits in each bitvector.
    adders: An integer, the number of nested addition operations.
  """
    with gfile.open(f"add{adders}_2x{n}.smt2", "w") as f:
        n_bit_nested_add_existing_file(n, adders, f)
def n_bit_nested_shift_new_file(n, shifts):
    """Makes a new file and writes an n-bit shift proof.

  Args:
    n: An integer, the number of bits in each bitvector.
    shifts: An integer, the number of nested shift operations.
  """
    with gfile.open(f"shift{shifts}_2x{n}.smt2", "w") as f:
        n_bit_nested_shift_existing_file(n, shifts, f)
  def setUp(self):
    super().setUp()
    server_path = runfiles.get_path('xls/synthesis/dummy_synthesis_server_main')
    self.port = portpicker.pick_unused_port()
    self.lc = lec_characterizer.LecCharacterizer(
        [server_path, '--port={}'.format(self.port)], self.port)

    cell_lib_path = runfiles.get_path(self._CELL_LIBRARY_PATH)
    with gfile.open(cell_lib_path, 'r') as f:
      self.cell_lib_text = f.read()
def main(argv):
    if len(argv) > 3:
        raise app.UsageError('Too many command-line arguments.')

    # Read in the results file to see what configs to test.
    results = lc_pb2.LecTiming()
    if FLAGS.results_path and gfile.exists(FLAGS.results_path):
        with gfile.open(FLAGS.results_path, 'r') as fd:
            results = text_format.ParseLines(fd, lc_pb2.LecTiming())

    with gfile.open(FLAGS.cell_library_textproto_path, 'r') as fd:
        cell_library_textproto = fd.read()

    lc = lc_mod.LecCharacterizer(FLAGS.synthesis_server_address)

    for width in FLAGS.widths:
        bits_type = xls_type_pb2.TypeProto(
            type_enum=xls_type_pb2.TypeProto.BITS, bit_count=int(width))

        function_type = xls_type_pb2.FunctionTypeProto()
        function_type.parameters.add().CopyFrom(bits_type)
        function_type.parameters.add().CopyFrom(bits_type)
        function_type.return_type.CopyFrom(bits_type)

        test_case = None
        for result_case in results.test_cases:
            # Find a matching test case for this function type, if one exists.
            if result_case.function_type == function_type:
                test_case = result_case
                break

        if test_case is None:
            test_case = results.test_cases.add()
            test_case.function_type.CopyFrom(function_type)

        runs_left = FLAGS.runs_per_type - len(test_case.exec_times_us)
        if runs_left > 0:
            lc.run(results, op_pb2.OpProto.Value(FLAGS.op), function_type,
                   int(runs_left), cell_library_textproto, z3_lec.run,
                   _save_results)
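# Hypothetical command line for this driver; the binary name and all flag
# values are assumptions inferred from the flags referenced above:
#   lec_characterizer_main --op=OP_ADD --widths=8,16,32 --runs_per_type=16 \
#       --results_path=/tmp/lec.textproto \
#       --cell_library_textproto_path=/tmp/cells.textproto \
#       --synthesis_server_address=localhost:10000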
    def run(self,
            op: op_pb2.OpProto,
            samples: List[Tuple[List[type_mod.Type], type_mod.Type]],
            num_iters: int,
            cell_library_textproto: str,
            results_path: str,
            lec_fn: Callable[[str, str, str, str], bool] = z3_lec.run) -> bool:
        """Characterizes LEC timing across a set of data types.

    This function iterates over the input samples (collections of arg types),
    creates IR and a netlist for each, and sends them to _run_sample()
    to execute.

    Args:
      op: The IR operator to characterize.
      samples: A list of ([Arg type], Return type) tuples, each of which
        represents the input and output types for a sample to run.
      num_iters: The number of iterations to run for each sample.
      cell_library_textproto: Text-format proto containing the netlist's cell
        library.
      results_path: Path to output the results proto. If this file already
        exists, then we append the results of this execution to its contents.
      lec_fn: The function to execute for timing information. Takes in the IR
        text, the netlist text, the name of the netlist module to compare, and
        the cell library textproto. Returns True if the IR and netlist are
        proved to be equivalent.

    Returns:
      True if the generated IR and netlist are proved equivalent, and False
      otherwise.
    """
        results = lec_characterizer_pb2.LecTiming()
        if gfile.exists(results_path):
            with gfile.open(results_path, 'r') as f:
                text_format.Parse(f.read(), results)
        else:
            results.ir_function = 'single_op_' + op_pb2.OpProto.Name(op)

        for (operand_types, output_type) in samples:
            ir_text, netlist_text = self._generate_sources(
                op, operand_types, output_type)

            if not self._run_sample(ir_text, netlist_text, num_iters,
                                    cell_library_textproto, results,
                                    results_path, lec_fn):
                return False

        return True
  def setUp(self):
    super().setUp()
    server_path = runfiles.get_path('xls/synthesis/dummy_synthesis_server_main')
    self._port = portpicker.pick_unused_port()
    self._synthesis_server = subprocess.Popen(
        [server_path, '--port={}'.format(self._port)])

    cell_lib_path = runfiles.get_path(self._CELL_LIBRARY_PATH)
    with gfile.open(cell_lib_path, 'r') as f:
      self._cell_lib_text = f.read()

    self._lc = lec_characterizer.LecCharacterizer('localhost:{}'.format(
        self._port))

    self._byte_type = xls_type_pb2.TypeProto(
        type_enum=xls_type_pb2.TypeProto.BITS, bit_count=8)
def save_checkpoint(model: delay_model_pb2.DelayModel, checkpoint_path: str):
    """Writes the model to checkpoint_path as a textproto, if a path is given."""
    if checkpoint_path:
        with gfile.open(checkpoint_path, 'w') as f:
            f.write(text_format.MessageToString(model))
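# A minimal round-trip sketch for save_checkpoint, reusing the imports assumed
# by the snippet above (the path is an illustrative assumption): write the
# model out, then parse it back.
model = delay_model_pb2.DelayModel()
save_checkpoint(model, '/tmp/delay_model.textproto')
restored = delay_model_pb2.DelayModel()
with gfile.open('/tmp/delay_model.textproto', 'r') as f:
    text_format.Parse(f.read(), restored)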
def main(argv):
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')

  if FLAGS.simulate and not FLAGS.codegen:
    raise app.UsageError('Must specify --codegen when --simulate is given.')

  # Test that we can write to the crash and summary path.
  for path in (FLAGS.crash_path, FLAGS.summary_path):
    if path:
      gfile.make_dirs(path)
      with gfile.open(os.path.join(path, 'test'), 'w') as f:
        print('test', file=f)

  start = datetime.datetime.now()

  physical_core_count = psutil.cpu_count(logical=False)
  worker_count = FLAGS.worker_count or physical_core_count
  worker_count = max(worker_count, 1)  # Need at least one worker.
  queues = (multiprocess.get_user_data() or
            [mp.Queue() for _ in range(worker_count)])
  queues = queues[:worker_count]
  print('-- Creating pool of {} workers; physical core count {}'.format(
      worker_count, physical_core_count))
  workers = []
  for i in range(worker_count):
    queue = None if multiprocess.has_user_data_support() else queues[i]

    target = run_fuzz_multiprocess.do_worker_task
    args = (i, queue, FLAGS.crash_path, FLAGS.summary_path,
            FLAGS.save_temps_path, FLAGS.minimize_ir)

    worker = multiprocess.Process(target=target, args=args)

    worker.start()
    workers.append(worker)

  duration_str = FLAGS.duration
  duration = None if duration_str is None else cli_helpers.parse_duration(
      duration_str)

  seed = FLAGS.seed
  if not seed:
    seed = random.randrange(0, 1 << 31)
    print('-- Using randomly generated seed:', seed)
    sys.stdout.flush()

  generator_options = ast_generator.AstGeneratorOptions(
      disallow_divide=FLAGS.disallow_divide,
      emit_loops=FLAGS.emit_loops,
      short_samples=FLAGS.short_samples,
      max_width_bits_types=FLAGS.max_width_bits_types,
      max_width_aggregate_types=FLAGS.max_width_aggregate_types)

  default_sample_options = sample.SampleOptions(
      convert_to_ir=True,
      optimize_ir=True,
      use_jit=FLAGS.use_llvm_jit,
      codegen=FLAGS.codegen,
      simulate=FLAGS.simulate,
      simulator=FLAGS.simulator,
      use_system_verilog=FLAGS.use_system_verilog)
  sample_count = run_fuzz_multiprocess.do_generator_task(
      queues,
      seed,
      generator_options,
      FLAGS.sample_count,
      FLAGS.calls_per_sample,
      default_sample_options=default_sample_options,
      duration=duration,
      print_samples=FLAGS.print_samples)

  for i, worker in enumerate(workers):
    print('-- Joining on worker {}'.format(i))
    worker.join()

  delta = datetime.datetime.now() - start
  elapsed = delta.total_seconds()
  print(
      '-- Elapsed end-to-end: {} = {:.2f} seconds; {:,} samples; {:.2f} samples/s'
      .format(delta, elapsed, sample_count, sample_count / elapsed))
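# Hypothetical invocation of this fuzz driver; the binary name and flag
# values are assumptions based on the flags consumed above:
#   run_fuzz_multiprocess --worker_count=8 --sample_count=1024 --codegen \
#       --simulate --crash_path=/tmp/crashers --summary_path=/tmp/summaries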
    def _run_sample(
            self,
            ir_text: str,
            netlist_text: str,
            num_iters: int,
            cell_library_textproto: str,
            results: lec_characterizer_pb2.LecTiming,
            results_path: str,
            lec_fn: Callable[[str, str, str, str], bool] = z3_lec.run) -> bool:
        """Executes LEC for a single IR/netlist pair.

    Args:
      ir_text: The input IR to lec_fn.
      netlist_text: The input netlist to lec_fn.
      num_iters: The number of iterations to run for each sample.
      cell_library_textproto: Text-format proto containing the netlist's cell
        library.
      results: The LecTiming proto for this entire run.
      results_path: Path to which to write the results proto.
      lec_fn: The function to execute for timing information. Takes in the IR
        text, the netlist text, the name of the netlist module to compare, and
        the cell library textproto. Returns True if the IR and netlist are
        proved to be equivalent.

    Returns:
      True if the generated IR and netlist are proved equivalent, and False
      otherwise.
    """
        # Get or create the test case [proto] of interest.
        package = ir_parser.Parser.parse_package(ir_text)
        function_type = package.get_function(self._FUNCTION_NAME).get_type()

        test_case = None

        function_type_textproto = function_type.to_textproto()
        for result_case in results.test_cases:
            # As a reminder: we can't pass proper protos over the pybind11 boundary,
            # so it's simpler to compare textprotos.
            result_function_type_textproto = text_format.MessageToString(
                result_case.function_type)
            if result_function_type_textproto == function_type_textproto:
                test_case = result_case
                break

        if test_case is None:
            test_case = results.test_cases.add()
            text_format.Parse(function_type.to_textproto(),
                              test_case.function_type)

        for _ in range(num_iters):
            start_time = time.monotonic()
            are_equal = lec_fn(ir_text, netlist_text, self._MODULE_NAME,
                               cell_library_textproto)
            if not are_equal:
                logging.error('Bad comparison: ir: %s, netlist: %s', ir_text,
                              netlist_text)
                return False

            duration = time.monotonic() - start_time
            test_case.exec_times_us.append(int(duration * 1000000))

            test_case.average_us = int(
                sum(test_case.exec_times_us) / len(test_case.exec_times_us))

            # Some tests could be long-running, so write after every iter for safety.
            with gfile.open(results_path, 'w') as f:
                f.write(text_format.MessageToString(results))
def _save_results(results: lc_pb2.LecTiming) -> None:
    """Callback to save the results proto after every LEC."""
    with gfile.open(FLAGS.results_path, 'w') as fd:
        fd.write(text_format.MessageToString(results))
def parse_text_proto_file(proto_path: str, output: message.Message) -> None:
    """Parses a text-format proto at the given path into the given message."""
    with gfile.open(proto_path, 'r') as f:
        proto_text = f.read()
    text_format.Parse(proto_text, output)
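# Usage sketch for the helper above (the path is an illustrative assumption):
timing = lec_characterizer_pb2.LecTiming()
parse_text_proto_file('/tmp/results.textproto', timing)
print(timing.ir_function)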
def do_worker_task(workerno: int,
                   queue: Optional[mp.Queue],
                   crash_path: Text,
                   summary_path: Optional[Text] = None,
                   save_temps_path: Optional[Text] = None,
                   minimize_ir: bool = True) -> None:
  """Runs worker task, receiving commands from generator and executing them."""
  queue = queue or multiprocess.get_user_data()[workerno]
  crashers = 0
  calls = 0
  print('---- Started worker {}'.format(workerno))
  sys.stdout.flush()
  start = datetime.datetime.now()

  # Local file to write the summary information to before writing out to the
  # potentially remote (e.g. CNS) summary file. Avoids a potential CNS write
  # with every sample. Instead data is written out in batches.
  summary_file = os.path.join(summary_path, 'summary_%d.binarypb' %
                              workerno) if summary_path else None
  summary_temp_file = tempfile.mkstemp(
      prefix='temp_summary_')[1] if summary_path else None

  i = 0  # Silence pylint warning.
  for i in itertools.count():
    message = queue.get()
    if message.command == Command.STOP:
      break
    assert message.command == Command.RUN, message.command
    calls += len(message.sample.args_batch)
    run_dir = None
    if save_temps_path:
      run_dir = os.path.join(save_temps_path, str(message.sampleno))
      os.makedirs(run_dir)
    else:
      run_dir = tempfile.mkdtemp(prefix='run_fuzz_')

    try:
      run_fuzz.run_sample(
          message.sample,
          run_dir,
          summary_file=summary_temp_file,
          generate_sample_ns=message.generate_sample_ns)
    except sample_runner.SampleError as e:
      crashers += 1
      record_crasher(workerno, message.sampleno, minimize_ir, message.sample,
                     run_dir, crash_path, crashers, str(e))

    if summary_file and i % 25 == 0:
      # Append the local temporary summary file to the actual, potentially
      # remote one, and delete the temporary file.
      with gfile.open(summary_temp_file, 'rb') as f:
        summaries = f.read()
      with gfile.open(summary_file, 'ab+') as f:
        f.write(summaries)
      gfile.remove(summary_temp_file)

    if not save_temps_path:
      shutil.rmtree(run_dir)

    # TODO(leary): 2020-08-28 Turn this into an option.
    if i != 0 and i % 16 == 0:
      elapsed = (datetime.datetime.now() - start).total_seconds()
      print('---- Worker {:3}: {:8.2f} samples/s {:8.2f} calls/s'.format(
          workerno, i / elapsed, calls / elapsed))
      sys.stdout.flush()

  elapsed = (datetime.datetime.now() - start).total_seconds()
  print(
      '---- Worker {:3} finished! {:3} crashers; {:8.2f} samples/s; {:8.2f} calls/s'
      .format(workerno, crashers, i / elapsed, calls / elapsed))
  sys.stdout.flush()