def __init__(self):
    """Set up the `insert` subtool and register the argument groups it uses."""
    super().__init__("insert")
    arg_groups = [
        OnnxNodeArgs(),
        ModelArgs(model_required=True, inputs="--model-inputs", model_type="onnx"),
        OnnxShapeInferenceArgs(),
        OnnxLoaderArgs(output_prefix=None),
        OnnxSaveArgs(infer_shapes=True, required=True),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def test_external_data(self):
    """A model with external weights loads correctly via --load-external-data."""
    helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
    ext_model = ONNX_MODELS["ext_weights"]
    helper.parse_args([ext_model.path, "--load-external-data", ext_model.ext_data])
    loaded = helper.load_onnx()
    assert len(loaded.graph.node) == 3
def __init__(self):
    """Set up the `model` subtool and register loader argument groups for each backend."""
    super().__init__("model")
    arg_groups = [
        ModelArgs(model_required=True, inputs=None),
        TfLoaderArgs(tftrt=False, artifacts=False, outputs=False),
        OnnxLoaderArgs(outputs=False),
        TrtLoaderArgs(config=False, outputs=False),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def __init__(self):
    """Set up the `run` subtool and register every argument group it supports."""
    super().__init__("run")
    arg_groups = [
        ModelArgs(),
        TfLoaderArgs(tftrt=True),
        TfConfigArgs(),
        TfRunnerArgs(),
        Tf2OnnxLoaderArgs(),
        OnnxSaveArgs(output="save-onnx", short_opt=None),
        OnnxShapeInferenceArgs(),
        OnnxLoaderArgs(save=True),
        OnnxrtRunnerArgs(),
        PluginRefArgs(),
        # We run calibration with the inference-time data
        TrtConfigArgs(random_data_calib_warning=False),
        TrtPluginLoaderArgs(),
        TrtNetworkLoaderArgs(),
        TrtEngineSaveArgs(output="save-engine", short_opt=None),
        TrtEngineLoaderArgs(save=True),
        TrtRunnerArgs(),
        TrtLegacyArgs(),
        DataLoaderArgs(),
        ComparatorRunArgs(),
        ComparatorCompareArgs(),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def test_basic(self):
    """--onnx-outputs should restrict the loaded model to the named output."""
    helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
    helper.parse_args([ONNX_MODELS["identity_identity"].path, "--onnx-outputs=identity_out_0"])
    loaded = helper.load_onnx()
    outputs = loaded.graph.output
    assert len(outputs) == 1
    assert outputs[0].name == "identity_out_0"
def __init__(self):
    """Set up the `trt-network` subtool and register its argument groups."""
    super().__init__("trt-network")
    arg_groups = [
        ModelArgs(model_required=False, inputs=None),
        TfLoaderArgs(artifacts=False),
        Tf2OnnxLoaderArgs(),
        OnnxLoaderArgs(),
        TrtPluginLoaderArgs(),
        TrtNetworkLoaderArgs(),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def __init__(self):
    """Set up the `reduce` subtool and register its argument groups."""
    super().__init__("reduce")
    arg_groups = [
        ArtifactSorterArgs("polygraphy_debug.onnx", prefer_artifacts=False),
        ModelArgs(model_required=True, inputs="--model-inputs", model_type="onnx"),
        OnnxSaveArgs(),
        OnnxShapeInferenceArgs(default=True, enable_force_fallback=True),
        OnnxLoaderArgs(output_prefix=None),
        DataLoaderArgs(),  # For fallback shape inference
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def __init__(self, name):
    """Set up a comparison-style subtool called `name` and register its argument groups."""
    super().__init__(name)
    arg_groups = [
        DataLoaderArgs(),
        ModelArgs(model_required=True),
        OnnxLoaderArgs(outputs=False),
        TrtLoaderArgs(),
        TrtRunnerArgs(),
        ComparatorRunArgs(iters=False, write=False),
        ComparatorCompareArgs(),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def test_external_data(self):
    """--save-external-data should write both the model file and the data file."""
    source = onnx_from_path(ONNX_MODELS["const_foldable"].path)
    helper = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
    with tempfile.NamedTemporaryFile() as model_file, tempfile.NamedTemporaryFile() as data_file:
        helper.parse_args(["-o", model_file.name, "--save-external-data", data_file.name])
        helper.save_onnx(source)
        check_file_non_empty(model_file.name)
        check_file_non_empty(data_file.name)
def __init__(self):
    """Set up the `model` subtool (TRT inspection variant) and register its argument groups."""
    super().__init__("model")
    arg_groups = [
        ModelArgs(model_required=True, inputs=None),
        TfLoaderArgs(artifacts=False, outputs=False),
        OnnxShapeInferenceArgs(),
        OnnxLoaderArgs(output_prefix=None),
        TrtPluginLoaderArgs(),
        TrtNetworkLoaderArgs(outputs=False),
        TrtEngineLoaderArgs(),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def engine_loader_args():
    """Build an ArgGroupTestHelper for TrtEngineLoaderArgs wired up with its dependencies."""
    deps = [
        ModelArgs(),
        OnnxLoaderArgs(),
        TrtConfigArgs(),
        TrtPluginLoaderArgs(),
        TrtNetworkLoaderArgs(),
    ]
    return ArgGroupTestHelper(TrtEngineLoaderArgs(), deps=deps)
def test_size_threshold(self):
    """Tensors below --external-data-size-threshold should stay in the model file."""
    source = onnx_from_path(ONNX_MODELS["const_foldable"].path)
    helper = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
    with util.NamedTemporaryFile() as model_file, util.NamedTemporaryFile() as data_file:
        helper.parse_args(
            ["-o", model_file.name, "--save-external-data", data_file.name, "--external-data-size-threshold=1024"]
        )
        helper.save_onnx(source)
        # Everything is under the threshold, so the external data file stays empty.
        assert is_file_non_empty(model_file.name)
        assert is_file_empty(data_file.name)
def __init__(self):
    """Set up the `extract` subtool and register its argument groups."""
    super().__init__("extract")
    arg_groups = [
        ModelArgs(model_required=True, inputs="--model-inputs", model_type="onnx"),
        DataLoaderArgs(),
        OnnxShapeInferenceArgs(default=False, enable_force_fallback=True),
        OnnxLoaderArgs(output_prefix=None),
        OnnxSaveArgs(required=True),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def __init__(self):
    """Set up the `sanitize` subtool and register its argument groups."""
    super().__init__("sanitize")
    arg_groups = [
        ModelArgs(model_required=True, inputs="--override-inputs", model_type="onnx"),
        DataLoaderArgs(),
        OnnxShapeInferenceArgs(default=True, enable_force_fallback=True),
        OnnxLoaderArgs(output_prefix=""),
        OnnxSaveArgs(infer_shapes=True, required=True),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def test_shape_inference(self):
    """With --shape-inference, the generated script should wrap the path in InferShapes."""
    # When using shape inference, we should load directly from the path
    helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs(), OnnxShapeInferenceArgs()])
    identity_model = ONNX_MODELS["identity"]
    helper.parse_args([identity_model.path, "--shape-inference"])
    assert helper.should_use_onnx_loader()

    script = Script()
    helper.add_onnx_loader(script)
    expected_loader = "InferShapes({:})".format(repr(identity_model.path))
    assert expected_loader in str(script)
def __init__(self):
    """Set up the `convert` subtool and register its argument groups."""
    super().__init__("convert")
    arg_groups = [
        ModelArgs(model_required=True),
        TfLoaderArgs(artifacts=False),
        Tf2OnnxLoaderArgs(),
        OnnxShapeInferenceArgs(),
        OnnxLoaderArgs(),
        OnnxSaveArgs(output=False),
        DataLoaderArgs(),  # For int8 calibration
        TrtConfigArgs(),
        TrtPluginLoaderArgs(),
        TrtNetworkLoaderArgs(),
        TrtEngineLoaderArgs(),
        TrtEngineSaveArgs(output=False),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def __init__(self):
    """Set up the `extract` subtool and register its argument groups."""
    super().__init__("extract")
    arg_groups = [
        ModelArgs(
            model_required=True,
            inputs="--model-inputs",
            model_type="onnx",
            inputs_doc="Input shapes to use when generating data to run fallback shape inference. "
            "Has no effect if fallback shape inference is not run",
        ),
        DataLoaderArgs(),
        OnnxShapeInferenceArgs(default=False, enable_force_fallback=True),
        OnnxLoaderArgs(output_prefix=None),
        OnnxSaveArgs(required=True),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def test_load_network(self):
    """--trt-outputs should mark only the named tensor as a network output."""
    helper = ArgGroupTestHelper(
        TrtNetworkLoaderArgs(), deps=[ModelArgs(), OnnxLoaderArgs(), TrtPluginLoaderArgs()]
    )
    helper.parse_args([ONNX_MODELS["identity_identity"].path, "--trt-outputs=identity_out_0"])
    builder, network, parser = helper.load_network()
    with builder, network:
        assert network.num_outputs == 1
        assert network.get_output(0).name == "identity_out_0"
def __init__(self):
    """Set up the `capability` subtool and register its argument groups."""
    super().__init__("capability")
    self.subscribe_args(ModelArgs(model_required=True, inputs=None, model_type="onnx"))
    self.subscribe_args(OnnxShapeInferenceArgs(default=True))
    self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
    # Disallow ext data path since we're writing multiple models - otherwise, it'll be clobbered each time.
    self.subscribe_args(
        OnnxSaveArgs(
            allow_ext_data_path=False,
            custom_help="Directory to write out supported and unsupported subgraphs. "
            "Defaults to 'polygraphy_capability_dumps' in the current directory",
            default_output_path="polygraphy_capability_dumps",
        )
    )
def __init__(self, name, strict_types_default=None, prefer_artifacts=True):
    """Set up a debug-style subtool called `name` and register its argument groups."""
    super().__init__(name)
    arg_groups = [
        ArtifactSorterArgs("polygraphy_debug.engine", prefer_artifacts=prefer_artifacts),
        ModelArgs(model_required=True, inputs=None),
        OnnxShapeInferenceArgs(),
        OnnxLoaderArgs(output_prefix=None),
        DataLoaderArgs(),  # For int8 calibration
        TrtConfigArgs(strict_types_default=strict_types_default),
        TrtPluginLoaderArgs(),
        TrtNetworkLoaderArgs(),
        TrtEngineLoaderArgs(),
        TrtEngineSaveArgs(output=False),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def test_shape_inference_ext_data(self):
    """Shape inference with an external data dir should pass the dir into InferShapes."""
    helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs(), OnnxShapeInferenceArgs()])
    ext_model = ONNX_MODELS["ext_weights"]
    helper.parse_args([ext_model.path, "--external-data-dir", ext_model.ext_data, "--shape-inference"])
    assert helper.should_use_onnx_loader()

    script = Script()
    helper.add_onnx_loader(script)
    expected_loader = "InferShapes({:}, external_data_dir={:})".format(
        repr(ext_model.path), repr(ext_model.ext_data)
    )
    assert expected_loader in str(script)

    loaded = helper.load_onnx()
    _check_ext_weights_model(loaded)
def __init__(self):
    """Set up the `run` subtool (legacy TRT loader variant) and register its argument groups."""
    super().__init__("run")
    arg_groups = [
        ModelArgs(),
        TfLoaderArgs(),
        TfConfigArgs(),
        TfRunnerArgs(),
        Tf2OnnxLoaderArgs(),
        OnnxLoaderArgs(),
        OnnxrtRunnerArgs(),
        OnnxtfRunnerArgs(),
        TrtLoaderArgs(network_api=True),
        TrtRunnerArgs(),
        TrtLegacyArgs(),
        DataLoaderArgs(),
        ComparatorRunArgs(),
        ComparatorCompareArgs(),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def test_no_all_tensors_to_one_file(self):
    """--no-save-all-tensors-to-one-file should emit one file per external tensor."""
    source = onnx_from_path(ONNX_MODELS["const_foldable"].path)
    helper = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
    with tempfile.TemporaryDirectory() as outdir:
        model_path = os.path.join(outdir, "model.onnx")
        helper.parse_args(
            [
                "-o",
                model_path,
                "--save-external-data",
                "--external-data-size-threshold=0",
                "--no-save-all-tensors-to-one-file",
            ]
        )
        helper.save_onnx(source)
        assert is_file_non_empty(model_path)
        # Model file plus one file per externally-stored tensor.
        outfiles = glob.glob(os.path.join(outdir, "*"))
        assert len(outfiles) == 4
def __init__(self):
    """Set up the `run` subtool and register every argument group it supports."""
    super().__init__("run")
    arg_groups = [
        ModelArgs(),
        TfLoaderArgs(tftrt=True),
        TfConfigArgs(),
        TfRunnerArgs(),
        Tf2OnnxLoaderArgs(),
        OnnxSaveArgs(output="save-onnx", short_opt=None),
        OnnxShapeInferenceArgs(),
        OnnxLoaderArgs(save=True),
        OnnxrtRunnerArgs(),
        TrtConfigArgs(),
        TrtPluginLoaderArgs(),
        TrtNetworkLoaderArgs(),
        TrtEngineSaveArgs(output="save-engine", short_opt=None),
        TrtEngineLoaderArgs(save=True),
        TrtRunnerArgs(),
        TrtLegacyArgs(),
        DataLoaderArgs(),
        ComparatorRunArgs(),
        ComparatorCompareArgs(),
    ]
    for group in arg_groups:
        self.subscribe_args(group)
def test_defaults(self):
    """With no flags, the external-data size threshold should default to None."""
    helper = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
    helper.parse_args([])
    assert helper.size_threshold is None
def test_external_data(self):
    """A model with external weights loads correctly via --external-data-dir."""
    helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
    ext_model = ONNX_MODELS["ext_weights"]
    helper.parse_args([ext_model.path, "--external-data-dir", ext_model.ext_data])
    loaded = helper.load_onnx()
    _check_ext_weights_model(loaded)
def __init__(self, name, inputs=None, data=False, shape_inference_default=None):
    """Set up an ONNX-based subtool called `name`, optionally with a data loader."""
    super().__init__(name)
    arg_groups = [
        ModelArgs(model_required=True, inputs=inputs, model_type="onnx"),
        OnnxLoaderArgs(write=False, outputs=False, shape_inference_default=shape_inference_default),
    ]
    if data:
        arg_groups.append(DataLoaderArgs())
    for group in arg_groups:
        self.subscribe_args(group)
def test_size_threshold_parsing(self, arg, expected):
    """--external-data-size-threshold values should parse to the expected number."""
    helper = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
    helper.parse_args(["--external-data-size-threshold", arg])
    assert helper.size_threshold == expected