def should_use_onnx_loader(self, disable_custom_outputs=None):
    """
    Whether this model must be loaded through a Polygraphy ONNX loader,
    e.g., because it requires modifications before use.
    """
    # Probe with a throwaway script: if the modify-loader returns a different
    # loader name than the one we passed in, modifications are required.
    probe_script = Script()
    sentinel = "check_needs_modify"
    needs_modify = self._get_modify_onnx_loader(probe_script, sentinel, disable_custom_outputs) != sentinel

    if not self.model_args.model_type.is_onnx():
        return True
    # Other loaders do not support external data, so fall back to the ONNX
    # loader whenever external data is present.
    return needs_modify or self.load_external_data
def test_shape_inference(self):
    """With shape inference enabled, the model should be loaded directly from its path."""
    helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs(), OnnxShapeInferenceArgs()])
    identity_model = ONNX_MODELS["identity"]
    helper.parse_args([identity_model.path, "--shape-inference"])
    assert helper.should_use_onnx_loader()

    script = Script()
    helper.add_onnx_loader(script)

    assert "InferShapes({:})".format(repr(identity_model.path)) in str(script)
def test_shape_inference_ext_data(self):
    """Shape inference with external weights should pass the external data dir through."""
    helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs(), OnnxShapeInferenceArgs()])
    ext_model = ONNX_MODELS["ext_weights"]
    helper.parse_args([ext_model.path, "--external-data-dir", ext_model.ext_data, "--shape-inference"])
    assert helper.should_use_onnx_loader()

    script = Script()
    helper.add_onnx_loader(script)

    expected = "InferShapes({:}, external_data_dir={:})".format(repr(ext_model.path), repr(ext_model.ext_data))
    assert expected in str(script)

    _check_ext_weights_model(helper.load_onnx())
def run_script(script_func, *args):
    """
    Populates a script using the provided callable, then returns the variable
    indicated by the return value of the callable.

    Args:
        script_func (Callable(Script, *args) -> str):
                A callable that populates a Script and then returns
                the name of an object defined within the script to retrieve.
        args:
                Additional positional arguments to pass to script_func.
                The script_func should accept these by variable name instead
                of taking the values themselves. Values of ``None`` will be
                passed directly instead of by variable name.

    Returns:
        object: An object defined within the script, or ``None`` if it is not
            defined by the script.
    """
    script = Script()

    # Use an explicit namespace dict for the generated script instead of mutating
    # the dict returned by locals(): writes to locals() inside a function are
    # implementation-defined and are discarded entirely under PEP 667
    # (Python 3.13+), which would make the __argN variables invisible to exec().
    namespace = {}
    arg_names = []
    for index, arg in enumerate(args):
        if arg is not None:
            # Bind the value under a synthetic name the generated script refers to.
            arg_name = safe("__arg{:}", index)
            namespace[arg_name.unwrap()] = arg
            arg_names.append(inline(arg_name))
        else:
            # None is passed through literally rather than by variable name.
            arg_names.append(None)

    safe_ret_name = script_func(script, *arg_names)
    exec(str(script), globals(), namespace)

    if safe_ret_name is not None:
        ret_name = ensure_safe(safe_ret_name).unwrap()
        # The script may legitimately not define the requested name.
        return namespace.get(ret_name)
    return None
def build_script(self, args):
    """Assemble the `run` script: logger, selected runners, comparison, and exit status."""
    script = Script(
        summary=generate_summary(self.arg_groups[ModelArgs].model_file, args.runners, args.load_results))
    self.arg_groups[LoggerArgs].add_to_script(script)

    if not args.runners:
        G_LOGGER.warning("No runners have been selected. Inference will not be run!")

    # Maps each CLI runner name to the argument group that emits its loader.
    runner_arg_groups = {
        "tf": TfRunnerArgs,
        "onnxrt": OnnxrtRunnerArgs,
        "trt": TrtRunnerArgs,
        "trt_legacy": TrtLegacyArgs,
        "pluginref": PluginRefArgs,
    }
    for runner_arg in args.runners:
        self.arg_groups[runner_arg_groups[runner_arg]].add_to_script(script)

    results_name = self.arg_groups[ComparatorRunArgs].add_to_script(script)
    success_name = self.arg_groups[ComparatorCompareArgs].add_to_script(script, results_name=results_name)

    script.add_import(imports=["sys"])
    cmd_run = inline(safe("' '.join(sys.argv)"))
    exit_status = safe(
        "# Report Results\n"
        "cmd_run = {cmd}\n"
        "if not {success}:\n"
        '\tG_LOGGER.critical("FAILED | Command: {{}}".format(cmd_run))\n'
        'G_LOGGER.finish("PASSED | Command: {{}}".format(cmd_run))\n',
        cmd=cmd_run,
        success=success_name,
    )
    script.append_suffix(exit_status)

    return script
def run(self, args):
    """Write out a template script that builds a TensorRT builder configuration."""
    script = Script(summary="Creates a TensorRT Builder Configuration.", always_create_runners=False)
    script.add_import(imports=["func"], frm="polygraphy")
    script.add_import(imports=["tensorrt as trt"])

    config_loader = self.arg_groups[TrtConfigArgs].add_trt_config_loader(script)
    if not config_loader:
        # No config options were supplied; fall back to a default CreateConfig.
        script.add_import(imports=["CreateConfig"], frm="polygraphy.backend.trt")
        config_loader = script.add_loader(safe("CreateConfig()"), "create_trt_config")

    func_params = safe("config")
    script.append_suffix(safe("@func.extend({:})", inline(config_loader)))
    script.append_suffix(safe("def load_config({:}):", inline(func_params)))
    script.append_suffix(
        safe("\tpass # TODO: Set up the builder configuration here. This function should not return anything."))

    script.save(args.output)
def test_add_funcs_fail_on_unsafe(self, func):
    """Each Script mutator must reject strings that were not marked safe."""
    unchecked_script = Script()
    with pytest.raises(PolygraphyInternalException, match="was not checked for safety"):
        func(unchecked_script)
def make_test_string():
    """Build and return a Script.String wrapping the literal "test"."""
    wrapped = Script.String("test")
    return wrapped
def run(self, args):
    """Write out a template script that defines or modifies a TensorRT network."""
    script = Script(
        summary="Defines or modifies a TensorRT Network using the Network API.",
        always_create_runners=False)
    script.add_import(imports=["func"], frm="polygraphy")
    script.add_import(imports=["tensorrt as trt"])

    if self.arg_groups[ModelArgs].model_file is None:
        # No model was provided, so the template starts from an empty network.
        script.add_import(imports=["CreateNetwork"], frm="polygraphy.backend.trt")
        network_loader = safe("CreateNetwork()")
        func_params = safe("builder, network")
    else:
        network_loader = self.arg_groups[TrtNetworkLoaderArgs].add_trt_network_loader(script)
        func_params = safe("builder, network, parser")

    script.append_suffix(safe("@func.extend({:})", inline(network_loader)))
    script.append_suffix(safe("def load_network({:}):", inline(func_params)))
    script.append_suffix(
        safe("\tpass # TODO: Set up the network here. This function should not return anything."))

    script.save(args.output)