def add_to_script(self, script, data_loader_name):
    script.add_import(imports=["Comparator"], frm="polygraphy.comparator")
    script.add_import(imports=["sys"])

    RESULTS_VAR_NAME = Inline("results")

    comparator_run = Script.invoke("Comparator.run", script.get_runners(), warm_up=self.warm_up,
                                   data_loader=data_loader_name, use_subprocess=self.use_subprocess,
                                   save_inputs_path=self.save_inputs)
    script.append_suffix(Script.format_str("\n# Runner Execution\n{results} = {:}",
                                           Inline(comparator_run), results=RESULTS_VAR_NAME))

    if self.save_results:
        G_LOGGER.verbose("Will save runner results to: {:}".format(self.save_results))
        script.add_import(imports=["misc"], frm="polygraphy.util")
        script.append_suffix(Script.format_str("\n# Save results\nmisc.pickle_save({:}, {results})",
                                               self.save_results, results=RESULTS_VAR_NAME))

    return RESULTS_VAR_NAME
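
# A hedged sketch of the script text the method above appends, assuming Script.invoke
# drops keyword arguments left at None and that script.get_runners() resolves to a
# generated `runners` list; `warm_up=10` and 'results.pkl' are hypothetical values:
#
#     # Runner Execution
#     results = Comparator.run(runners, warm_up=10, data_loader=data_loader)
#
#     # Save results
#     misc.pickle_save('results.pkl', results)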
def add_trt_config_loader(script, args, data_loader_name):
    profiles = []
    for (min_shape, opt_shape, max_shape) in args_util.get(args, "profiles"):
        profile_str = "Profile()"
        for name in min_shape.keys():
            profile_str += Script.format_str(".add({:}, min={:}, opt={:}, max={:})", name,
                                             min_shape[name], opt_shape[name], max_shape[name])
        profiles.append(Inline(profile_str))

    if profiles:
        script.add_import(imports=["Profile"], frm="polygraphy.backend.trt")
        sep = Inline("\n{:}".format(constants.TAB))
        profiles = Script.format_str("[{:}{:}\n]", sep, Inline((",{:}".format(sep)).join(profiles)))
        profile_name = script.add_loader(profiles, "profiles")
    else:
        profile_name = None

    calibrator = None
    if args_util.get(args, "int8"):
        script.add_import(imports=["DataLoader"], frm="polygraphy.comparator")
        script.add_import(imports=["Calibrator"], frm="polygraphy.backend.trt")
        calibrator = Script.invoke("Calibrator",
                                   data_loader=Inline(data_loader_name) if data_loader_name else Inline("DataLoader()"),
                                   cache=args_util.get(args, "calibration_cache"))

    config_loader_str = Script.invoke_if_nondefault("CreateTrtConfig", max_workspace_size=args_util.get(args, "workspace"),
                                                    tf32=args_util.get(args, "tf32"), fp16=args_util.get(args, "fp16"),
                                                    int8=args_util.get(args, "int8"),
                                                    strict_types=args_util.get(args, "strict_types"),
                                                    profiles=profile_name,
                                                    calibrator=Inline(calibrator) if calibrator else None)
    if config_loader_str is not None:
        script.add_import(imports=["CreateConfig as CreateTrtConfig"], frm="polygraphy.backend.trt")
        config_loader_name = script.add_loader(config_loader_str, "create_trt_config")
    else:
        config_loader_name = None

    return config_loader_name
def add_trt_config_loader(self, script, data_loader_name):
    profiles = []
    profile_args = tools_util.parse_profile_shapes(self.model_args.input_shapes, self.trt_min_shapes,
                                                   self.trt_opt_shapes, self.trt_max_shapes)
    for (min_shape, opt_shape, max_shape) in profile_args:
        profile_str = "Profile()"
        for name in min_shape.keys():
            profile_str += Script.format_str(".add({:}, min={:}, opt={:}, max={:})", name,
                                             min_shape[name], opt_shape[name], max_shape[name])
        profiles.append(Inline(profile_str))

    if profiles:
        script.add_import(imports=["Profile"], frm="polygraphy.backend.trt")
        sep = Inline("\n{:}".format(constants.TAB))
        profiles = Script.format_str("[{:}{:}\n]", sep, Inline((",{:}".format(sep)).join(profiles)))
        profile_name = script.add_loader(profiles, "profiles")
    else:
        profile_name = None

    calibrator = None
    if self.int8:
        script.add_import(imports=["Calibrator"], frm="polygraphy.backend.trt")
        script.add_import(imports=["DataLoader"], frm="polygraphy.comparator")
        calibrator = Script.invoke("Calibrator",
                                   data_loader=Inline(data_loader_name) if data_loader_name else Inline("DataLoader()"),
                                   cache=self.calibration_cache)

    config_loader_str = Script.invoke_if_nondefault("CreateTrtConfig", max_workspace_size=self.workspace,
                                                    tf32=self.tf32, fp16=self.fp16, int8=self.int8,
                                                    strict_types=self.strict_types, profiles=profile_name,
                                                    calibrator=Inline(calibrator) if calibrator else None)
    if config_loader_str is not None:
        script.add_import(imports=["CreateConfig as CreateTrtConfig"], frm="polygraphy.backend.trt")
        config_loader_name = script.add_loader(config_loader_str, "create_trt_config")
    else:
        config_loader_name = None

    return config_loader_name
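
# A hedged sketch of the loader section the method above can emit for an INT8 build
# with one optimization profile; the variable names follow the add_loader() name hints
# but may be suffixed in practice, and the input name 'x' and shapes are hypothetical:
#
#     profiles = [
#         Profile().add('x', min=[1, 3, 224, 224], opt=[4, 3, 224, 224], max=[8, 3, 224, 224])
#     ]
#     create_trt_config = CreateTrtConfig(int8=True, profiles=profiles,
#                                         calibrator=Calibrator(data_loader=DataLoader()))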
def add_to_script(self, script): script.add_import(imports=["TrtLegacyRunner"], frm="polygraphy.backend.trt_legacy") G_LOGGER.warning("Legacy TensorRT runner only supports implicit batch TensorFlow/UFF, ONNX, and Caffe models") if self.model_args.model_type == "onnx": script.add_import(imports=["ParseNetworkFromOnnxLegacy"], frm="polygraphy.backend.trt_legacy") onnx_loader = self.onnx_loader_args.add_onnx_loader(script, disable_outputs=True) loader_name = script.add_loader(Script.format_str("ParseNetworkFromOnnxLegacy({:})", onnx_loader), "parse_network_from_onnx_legacy") elif self.model_args.model_type == "caffe": script.add_import(imports=["LoadNetworkFromCaffe"], frm="polygraphy.backend.trt_legacy") loader_name = script.add_loader(Script.format_str("LoadNetworkFromCaffe({:}, {:}, {:}, {:})", self.model_args.model_file, self.caffe_model, self.trt_outputs, self.batch_size), "parse_network_from_caffe") else: script.add_import(imports=["LoadNetworkFromUff"], frm="polygraphy.backend.trt_legacy") if self.model_args.model_type == "uff": script.add_import(imports=["LoadUffFile"], frm="polygraphy.backend.trt_legacy") shapes = {name: shape for name, (_, shape) in self.trt_loader_args.input_shapes.items()} loader_name = script.add_loader(Script.format_str("LoadUffFile({:}, {:}, {:})", self.model_args.model_file, misc.default_value(shapes, {}), self.trt_outputs), "load_uff_file") else: script.add_import(imports=["ConvertToUff"], frm="polygraphy.backend.trt_legacy") loader_name = script.add_loader(Script.format_str("ConvertToUff({:}, save_uff={:}, preprocessor={:})", self.tf_loader_args.add_to_script(script), self.save_uff, self.preprocessor), "convert_to_uff") loader_name = script.add_loader(Script.format_str("LoadNetworkFromUff({:}, uff_order={:})", loader_name, self.uff_order), "uff_network_loader") runner_str = Script.format_str("TrtLegacyRunner({:}, {:}, {:}, fp16={:}, tf32={:}, load_engine={:}, save_engine={:}, layerwise={:}, plugins={:})", loader_name, self.trt_loader_args.workspace, self.batch_size, self.trt_loader_args.fp16, self.trt_loader_args.tf32, self.model_args.model_file if self.model_args.model_type == "engine" else None, self.trt_runner_args.save_engine, self.trt_outputs==constants.MARK_ALL, self.trt_loader_args.plugins) runner_name = script.add_loader(runner_str, "trt_legacy_runner") script.add_runner(runner_name) return runner_name
def add_to_script(self, script):
    def _make_data_loader(script):
        data_loader_name = Inline("data_loader")

        input_metadata_str = Inline(repr(self.model_args.input_shapes)) if self.model_args.input_shapes else None
        if input_metadata_str:
            script.add_import(imports=["TensorMetadata"], frm="polygraphy.common")

        data_loader = Script.invoke_if_nondefault("DataLoader", seed=self.seed, iterations=self.iterations,
                                                  input_metadata=input_metadata_str, int_range=self.int_range,
                                                  float_range=self.float_range)
        if data_loader is not None:
            script.add_import(imports=["DataLoader"], frm="polygraphy.comparator")
            script.append_prefix(Script.format_str("\n# Inference Inputs Loader\n{:} = {:}",
                                                   data_loader_name, Inline(data_loader)))
        else:
            data_loader_name = None

        return data_loader_name

    if self.load_inputs:
        script.add_import(imports=["misc"], frm="polygraphy.util")

        data_loader_name = Inline("data_loader")
        script.append_prefix(Script.format_str("# Load inputs\n{data_loader} = []\nfor input_data_path in {load_inputs}:"
                                               "\n{tab}{data_loader}.extend(misc.pickle_load(input_data_path))",
                                               data_loader=data_loader_name, load_inputs=self.load_inputs,
                                               tab=Inline(constants.TAB)))
    else:
        data_loader_name = _make_data_loader(script)

    script.append_prefix("")  # Newline
    return data_loader_name
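
# A hedged sketch of the prefix the method above emits when --load-inputs is given,
# assuming constants.TAB is four spaces; 'custom_inputs.pkl' is a hypothetical path:
#
#     # Load inputs
#     data_loader = []
#     for input_data_path in ['custom_inputs.pkl']:
#         data_loader.extend(misc.pickle_load(input_data_path))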
def build_script(self, args):
    script = Script(summary=generate_summary(self.makers[ModelArgs].model_file, args.runners, args.load_results))

    self.makers[LoggerArgs].add_to_script(script)

    data_loader_name = self.makers[DataLoaderArgs].add_to_script(script)

    for runner_arg in args.runners:
        add_runner_func = {
            "tf": self.makers[TfRunnerArgs].add_to_script,
            "onnxrt": self.makers[OnnxrtRunnerArgs].add_to_script,
            "onnxtf": self.makers[OnnxtfRunnerArgs].add_to_script,
            "trt": lambda script: self.makers[TrtRunnerArgs].add_to_script(script, data_loader_name),
            "trt_legacy": self.makers[TrtLegacyArgs].add_to_script,
        }[runner_arg]
        add_runner_func(script)

    RESULTS_VAR_NAME = self.makers[ComparatorRunArgs].add_to_script(script, data_loader_name=data_loader_name)
    SUCCESS_VAR_NAME = self.makers[ComparatorCompareArgs].add_to_script(script, results_name=RESULTS_VAR_NAME)

    cmd_run = Inline("' '.join(sys.argv)")
    script.append_suffix(Script.format_str('# Report Results\ncmd_run={cmd}\nif {success}:\n{tab}G_LOGGER.finish("PASSED | Command: {{}}".format(cmd_run))\nelse:\n{tab}G_LOGGER.error("FAILED | Command: {{}}".format(cmd_run))',
                                           cmd=cmd_run, success=SUCCESS_VAR_NAME, tab=Inline(constants.TAB)))
    script.append_suffix("sys.exit(0 if {success} else 1)".format(success=SUCCESS_VAR_NAME))

    return str(script)
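
# A hedged sketch of the "Report Results" suffix the method above appends, assuming
# constants.TAB is four spaces and SUCCESS_VAR_NAME resolves to the generated
# `success` variable:
#
#     # Report Results
#     cmd_run=' '.join(sys.argv)
#     if success:
#         G_LOGGER.finish("PASSED | Command: {}".format(cmd_run))
#     else:
#         G_LOGGER.error("FAILED | Command: {}".format(cmd_run))
#     sys.exit(0 if success else 1)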
def add_data_loader(script, args):
    def omit_none_tuple(tup):
        if all([elem is None for elem in tup]):
            return None
        return tup

    int_range = omit_none_tuple(tup=(args_util.get(args, "int_min"), args_util.get(args, "int_max")))
    float_range = omit_none_tuple(tup=(args_util.get(args, "float_min"), args_util.get(args, "float_max")))

    input_metadata_str = Inline(repr(args_util.get(args, "inputs"))) if args_util.get(args, "inputs") else None
    if input_metadata_str:
        script.add_import(imports=["TensorMetadata"], frm="polygraphy.common")

    data_loader = Script.invoke_if_nondefault("DataLoader", seed=args_util.get(args, "seed"),
                                              iterations=args_util.get(args, "iterations"),
                                              input_metadata=input_metadata_str, int_range=int_range,
                                              float_range=float_range)
    if data_loader is not None:
        data_loader_name = Inline("data_loader")
        script.add_import(imports=["DataLoader"], frm="polygraphy.comparator")
        script.append_prefix(Script.format_str("\n# Inference Inputs Loader\n{:} = {:}\n",
                                               data_loader_name, Inline(data_loader)))
    else:
        data_loader_name = None

    return data_loader_name
def add_comparator(script, args, data_loader_name, cmd_run):
    script.add_import(imports=["Comparator"], frm="polygraphy.comparator")
    script.add_import(imports=["sys"])

    comparator_run = Script.invoke("Comparator.run", script.get_runners(), warm_up=args.warm_up,
                                   data_loader=data_loader_name, use_subprocess=args.use_subprocess)
    script.append_suffix(Script.format_str("\n# Runner Execution\nresults = {:}", Inline(comparator_run)))

    if args.load_results:
        G_LOGGER.verbose("Will load runner results from: {:}".format(args.load_results))
        script.add_import(imports=["misc"], frm="polygraphy.util")
        script.append_suffix(Script.format_str("\n# Load results\nfor load_output in {:}:\n{:}results.update(misc.pickle_load(load_output))",
                                               args.load_results, Inline(constants.TAB)))

    if args.save_results:
        G_LOGGER.verbose("Will save runner results to: {:}".format(args.save_results))
        script.add_import(imports=["misc"], frm="polygraphy.util")
        script.append_suffix(Script.format_str("\n# Save results\nmisc.pickle_save({:}, results)", args.save_results))

    top_k = args_util.get(args, "top_k")
    if top_k is not None:
        script.add_import(imports=["PostprocessFunc"], frm="polygraphy.comparator")
        script.append_suffix(Script.format_str("\n# Postprocessing - Apply Top-{:}\nresults = Comparator.postprocess(results, PostprocessFunc.topk_func(k={:}))",
                                               top_k, top_k))

    script.append_suffix("\nsuccess = True")

    if len(args.runners) > 1 or args.load_results:  # Only do comparisons if there's actually something to compare.
        script.append_suffix("# Accuracy Comparison")

        compare_func_str = Script.invoke_if_nondefault("CompareFunc.basic_compare_func", rtol=args.rtol, atol=args.atol,
                                                       check_shapes=False if args.no_shape_check else None,
                                                       fail_fast=args.fail_fast)
        compare_func = None
        if compare_func_str:
            script.add_import(imports=["CompareFunc"], frm="polygraphy.comparator")
            compare_func = "compare_func"
            script.append_suffix(Script.format_str("{:} = {:}", Inline(compare_func), Inline(compare_func_str)))

        compare_accuracy = Script.invoke("Comparator.compare_accuracy", Inline("results"),
                                         compare_func=Inline(compare_func) if compare_func is not None else None,
                                         fail_fast=args.fail_fast)
        script.append_suffix(Script.format_str("success &= bool({:})\n", Inline(compare_accuracy)))

    if args.validate:
        script.append_suffix("# Validation\nsuccess &= Comparator.validate(results)\n")

    if cmd_run is None:
        cmd_run = Inline("' '.join(sys.argv)")
    script.append_suffix(Script.format_str('# Report Results\ncmd_run={cmd}\nif success:\n    G_LOGGER.success("PASSED | Command: {{}}".format(cmd_run))\nelse:\n    G_LOGGER.error("FAILED | Command: {{}}".format(cmd_run))',
                                           cmd=cmd_run))
    script.append_suffix("sys.exit(0 if success else 1)")
def add_to_script(self, script, results_name):
    if self.load_results:
        G_LOGGER.verbose("Will load runner results from: {:}".format(self.load_results))
        script.add_import(imports=["misc"], frm="polygraphy.util")
        script.append_suffix(Script.format_str("\n# Load results\nfor load_output in {:}:\n{:}{results}.extend(misc.pickle_load(load_output))",
                                               self.load_results, Inline(constants.TAB), results=results_name))

    if self.top_k is not None:
        script.add_import(imports=["PostprocessFunc"], frm="polygraphy.comparator")
        script.append_suffix(Script.format_str("\n# Postprocessing - Apply Top-{top_k}\n{results} = Comparator.postprocess({results}, PostprocessFunc.topk_func(k={top_k}))",
                                               top_k=self.top_k, results=results_name))

    SUCCESS_VAR_NAME = Inline("success")
    script.append_suffix("\n{success} = True".format(success=SUCCESS_VAR_NAME))

    if len(self.runners) > 1 or self.load_results:  # Only do comparisons if there's actually something to compare.
        script.append_suffix("# Accuracy Comparison")

        compare_func_str = Script.invoke_if_nondefault("CompareFunc.basic_compare_func", rtol=self.rtol, atol=self.atol,
                                                       check_shapes=False if self.no_shape_check else None,
                                                       fail_fast=self.fail_fast)
        compare_func = None
        if compare_func_str:
            script.add_import(imports=["CompareFunc"], frm="polygraphy.comparator")
            compare_func = "compare_func"
            script.append_suffix(Script.format_str("{:} = {:}", Inline(compare_func), Inline(compare_func_str)))

        compare_accuracy = Script.invoke("Comparator.compare_accuracy", results_name,
                                         compare_func=Inline(compare_func) if compare_func is not None else None,
                                         fail_fast=self.fail_fast)
        script.append_suffix(Script.format_str("{success} &= bool({:})\n", Inline(compare_accuracy),
                                               success=SUCCESS_VAR_NAME))

    if self.validate:
        script.append_suffix("# Validation\n{success} &= Comparator.validate({results})\n".format(success=SUCCESS_VAR_NAME,
                                                                                                  results=results_name))

    return SUCCESS_VAR_NAME
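
# A hedged sketch of the comparison suffix the method above emits when more than one
# runner is specified and non-default tolerances are set; the 0.001 tolerances are
# hypothetical values:
#
#     success = True
#     # Accuracy Comparison
#     compare_func = CompareFunc.basic_compare_func(rtol=0.001, atol=0.001)
#     success &= bool(Comparator.compare_accuracy(results, compare_func=compare_func))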