def benchmark_generator(**kwargs):
  """Runs the generator task, forwarding all keyword arguments unchanged.

  Thin wrapper; presumably exists so the generator shows up as a distinct
  frame under a profiler/benchmark harness — TODO confirm with callers.
  """
  do_generator_task(**kwargs)
def main(argv):
  """Drives a multiprocess fuzzing run: spawns workers, generates samples, reports timing.

  Args:
    argv: Command-line arguments after flag parsing; no positional args are
      accepted.

  Raises:
    app.UsageError: On extra positional arguments, or if --simulate is given
      without --codegen.
  """
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')
  if FLAGS.simulate and not FLAGS.codegen:
    raise app.UsageError(
        'Must specify --codegen when --simulate is given.')

  # Test that we can write to the crash and summary path.
  for path in (FLAGS.crash_path, FLAGS.summary_path):
    if path:
      gfile.make_dirs(path)
      with gfile.open(os.path.join(path, 'test'), 'w') as f:
        print('test', file=f)

  start = datetime.datetime.now()

  # Size the worker pool: explicit flag wins, else one worker per physical
  # core, and never fewer than one.
  physical_core_count = psutil.cpu_count(logical=False)
  worker_count = FLAGS.worker_count or physical_core_count
  worker_count = max(worker_count, 1)  # Need at least one worker.

  # Reuse platform-provided queues when available; otherwise create one
  # mp.Queue per worker. Either way, keep exactly worker_count queues.
  queues = (multiprocess.get_user_data() or
            [mp.Queue() for _ in range(worker_count)])
  queues = queues[:worker_count]

  print('-- Creating pool of {} workers; physical core count {}'.format(
      worker_count, physical_core_count))
  workers = []
  for i in range(worker_count):
    # With user-data support the platform delivers the queue itself, so the
    # worker gets None here; otherwise it gets its dedicated queue.
    queue = None if multiprocess.has_user_data_support() else queues[i]
    target = run_fuzz_multiprocess.do_worker_task
    args = (i, queue, FLAGS.crash_path, FLAGS.summary_path,
            FLAGS.save_temps_path, FLAGS.minimize_ir)
    worker = multiprocess.Process(target=target, args=args)
    worker.start()
    workers.append(worker)

  duration_str = FLAGS.duration
  duration = None if duration_str is None else cli_helpers.parse_duration(
      duration_str)

  # Fall back to a random 31-bit seed when none was supplied; echo it so the
  # run can be reproduced.
  seed = FLAGS.seed
  if not seed:
    seed = random.randrange(0, 1 << 31)
    print('-- Using randomly generated seed:', seed)
    sys.stdout.flush()

  generator_options = ast_generator.AstGeneratorOptions(
      disallow_divide=FLAGS.disallow_divide,
      emit_loops=FLAGS.emit_loops,
      short_samples=FLAGS.short_samples,
      max_width_bits_types=FLAGS.max_width_bits_types,
      max_width_aggregate_types=FLAGS.max_width_aggregate_types,
      emit_gate=not FLAGS.codegen)
  default_sample_options = sample.SampleOptions(
      convert_to_ir=True,
      optimize_ir=True,
      use_jit=FLAGS.use_llvm_jit,
      codegen=FLAGS.codegen,
      simulate=FLAGS.simulate,
      simulator=FLAGS.simulator,
      use_system_verilog=FLAGS.use_system_verilog)

  # Feeds generated samples to the workers; returns how many were produced.
  sample_count = run_fuzz_multiprocess.do_generator_task(
      queues,
      seed,
      generator_options,
      FLAGS.sample_count,
      FLAGS.calls_per_sample,
      default_sample_options=default_sample_options,
      duration=duration,
      print_samples=FLAGS.print_samples)

  for i, worker in enumerate(workers):
    print('-- Joining on worker {}'.format(i))
    worker.join()

  delta = datetime.datetime.now() - start
  elapsed = delta.total_seconds()
  print(
      '-- Elapsed end-to-end: {} = {:.2f} seconds; {:,} samples; {:.2f} samples/s'
      .format(delta, elapsed, sample_count, sample_count / elapsed))