def main(arg_list: List[str]) -> None:
    """Produce an html report from the scrape file.

    Parses the command-line arguments, loads a tactic predictor (from an
    explicit weights file or by registered name), runs ``report_file``
    over every input file in a thread pool, and writes a summary of the
    collected per-file results.
    """
    global predictor
    parser = argparse.ArgumentParser(
        description="Produce an html report from the scrape file.")
    parser.add_argument("-j", "--threads", default=16, type=int)
    parser.add_argument("--prelude", default=".", type=Path2)
    parser.add_argument("--verbose", "-v", help="verbose output",
                        action='store_const', const=True, default=False)
    parser.add_argument("--progress", "-P", help="show progress of files",
                        action='store_const', const=True, default=False)
    parser.add_argument("--debug", default=False, const=True,
                        action='store_const')
    parser.add_argument("--output", "-o", help="output data folder name",
                        default="static-report", type=Path2)
    parser.add_argument("--message", "-m", default=None)
    parser.add_argument('--context-filter', dest="context_filter", type=str,
                        default=None)
    parser.add_argument('--chunk-size', dest="chunk_size", type=int,
                        default=4096)
    parser.add_argument('--weightsfile', default=None)
    parser.add_argument('--predictor', choices=list(static_predictors.keys()),
                        default=None)
    parser.add_argument("--num-predictions", dest="num_predictions", type=int,
                        default=3)
    parser.add_argument('--skip-nochange-tac', default=False, const=True,
                        action='store_const', dest='skip_nochange_tac')
    parser.add_argument('filenames', nargs="+", help="proof file name (*.v)",
                        type=Path2)
    args = parser.parse_args(arg_list)

    # Get some metadata.  The original ran "git show --oneline | head -n 1"
    # through shell=True while passing a *list*, which only works by
    # accident on POSIX; instead run git directly and take the first line
    # of its output in Python.
    cur_commit = subprocess.check_output(
        ["git", "show", "--oneline"]).decode('utf-8').split("\n")[0].strip()
    cur_date = datetime.datetime.now()

    # Predictor selection mirrors get_predictor(), but returns (instead of
    # exiting) on error so the caller keeps control.
    if args.weightsfile:
        predictor = loadPredictorByFile(args.weightsfile)
    elif args.predictor:
        predictor = loadPredictorByName(args.predictor)
    else:
        print("You must specify either --weightsfile or --predictor!")
        parser.print_help()
        return

    if not args.output.exists():
        args.output.makedirs()

    context_filter = args.context_filter or \
        dict(predictor.getOptions())["context_filter"]

    with multiprocessing.pool.ThreadPool(args.threads) as pool:
        # Drop files whose report produced nothing (falsy stats).
        file_results = [stats for stats in
                        pool.imap_unordered(
                            functools.partial(report_file, args,
                                              predictor.training_args,
                                              context_filter),
                            args.filenames)
                        if stats]

    write_summary(args,
                  predictor.getOptions() +
                  [("report type", "static"),
                   ("predictor", args.predictor)],
                  cur_commit, cur_date, file_results)
def get_predictor(parser: argparse.ArgumentParser,
                  args: argparse.Namespace) -> TacticPredictor:
    """Load the tactic predictor selected on the command line.

    An explicit weights file takes precedence over a predictor named by
    ``--predictor``.  If neither option was supplied, print the usage
    message and exit the process with status 1.
    """
    if args.weightsfile:
        return loadPredictorByFile(args.weightsfile)
    if args.predictor:
        return loadPredictorByName(args.predictor)
    print("You must specify either --weightsfile or --predictor!")
    parser.print_help()
    sys.exit(1)
def main(arg_list: List[str]) -> None:
    """Try to match each proof file by predicting a tactic.

    Parses the command-line arguments, fills a shared job queue with the
    input filenames, spawns ``Worker`` threads that consume it, waits for
    every worker to finish, and writes a combined summary report.

    NOTE(review): another top-level ``main`` is defined earlier in this
    source; if both really live in the same module the later definition
    shadows the earlier one -- confirm these are meant to be separate
    files.
    """
    global jobs
    global num_jobs
    global net
    global gresult
    parser = argparse.ArgumentParser(
        description="try to match the file by predicting a tactic")
    parser.add_argument('-j', '--threads', default=16, type=int)
    parser.add_argument('--prelude', default=".", type=Path2)
    parser.add_argument('--debug', default=False, const=True,
                        action='store_const')
    parser.add_argument("--verbose", "-v", help="verbose output",
                        action='store_const', const=True, default=False)
    parser.add_argument("--progress", "-P", help="show progress of files",
                        action='store_const', const=True, default=False)
    parser.add_argument('-o', '--output', help="output data folder name",
                        default="report", type=Path2)
    parser.add_argument('-m', '--message', default=None)
    parser.add_argument(
        '--baseline',
        help="run in baseline mode, predicting {} every time".format(
            baseline_tactic),
        default=False, const=True, action='store_const')
    parser.add_argument('--context-filter', dest="context_filter", type=str,
                        default=None)
    parser.add_argument('--weightsfile', default=None)
    parser.add_argument('--predictor', choices=list(static_predictors.keys()),
                        default=None)
    parser.add_argument('--skip-nochange-tac', default=False, const=True,
                        action='store_const', dest='skip_nochange_tac')
    parser.add_argument('filenames', nargs="+", help="proof file name (*.v)",
                        type=Path2)
    args = parser.parse_args(arg_list)

    coqargs = ["sertop", "--implicit"]
    includes = subprocess.Popen(
        ['make', '-C', str(args.prelude), 'print-includes'],
        stdout=subprocess.PIPE).communicate()[0].decode('utf-8')

    # Get some metadata.  The original ran "git show --oneline | head -n 1"
    # through shell=True while passing a *list*, which only works by
    # accident on POSIX; instead run git directly and take the first line
    # of its output in Python.
    cur_commit = subprocess.check_output(
        ["git", "show", "--oneline"]).decode('utf-8').split("\n")[0].strip()
    cur_date = datetime.datetime.now()

    if not args.output.exists():
        args.output.makedirs()

    # Fill the shared job queue before any worker starts.
    jobs = queue.Queue()
    workers = []
    num_jobs = len(args.filenames)
    for infname in args.filenames:
        jobs.put(infname)
    # No point in running more threads than there are files.
    args.threads = min(args.threads, len(args.filenames))

    # Predictor selection mirrors get_predictor(), but returns (instead of
    # exiting) on error so the caller keeps control.
    if args.weightsfile:
        net = loadPredictorByFile(args.weightsfile)
    elif args.predictor:
        net = loadPredictorByName(args.predictor)
    else:
        print("You must specify either --weightsfile or --predictor!")
        parser.print_help()
        return
    gresult = GlobalResult(net.getOptions())
    context_filter = args.context_filter or dict(
        net.getOptions())["context_filter"]

    for idx in range(args.threads):
        worker = Worker(idx, coqargs, includes, args.output,
                        args.prelude, args.debug, num_jobs,
                        args.baseline, args.skip_nochange_tac,
                        context_filter, args)
        worker.start()
        workers.append(worker)

    # Join workers in the order they report completion on finished_queue.
    for idx in range(args.threads):
        finished_id = finished_queue.get()
        workers[finished_id].join()
        print("Thread {} finished ({} of {}).".format(
            finished_id, idx + 1, args.threads))

    write_summary(args.output, num_jobs, cur_commit, args.message,
                  args.baseline, cur_date, gresult)