Example #1
 def __init__(self):
     super().__init__("insert")
     self.subscribe_args(OnnxNodeArgs())
     self.subscribe_args(ModelArgs(model_required=True, inputs="--model-inputs", model_type="onnx"))
     self.subscribe_args(OnnxShapeInferenceArgs())
     self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
     self.subscribe_args(OnnxSaveArgs(infer_shapes=True, required=True))
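All of the constructor excerpts in this collection follow the same pattern: a subcommand passes its CLI name to the parent constructor and then registers whichever argument groups it needs through subscribe_args(). Below is a minimal, hedged skeleton of that pattern; the Tool base-class name and the exact ModelArgs defaults are assumptions, since the examples here only show the subclass side.

class MyOnnxCommand(Tool):  # "Tool" base-class name is an assumption, not shown in these excerpts
    def __init__(self):
        super().__init__("my-onnx-command")
        # Register only the argument groups this command actually needs.
        self.subscribe_args(ModelArgs(model_required=True, model_type="onnx"))
        self.subscribe_args(OnnxLoaderArgs())
        self.subscribe_args(OnnxSaveArgs())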
Example #2
    def test_external_data(self):
        arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
        model = ONNX_MODELS["ext_weights"]
        arg_group.parse_args([model.path, "--load-external-data", model.ext_data])
        model = arg_group.load_onnx()

        assert len(model.graph.node) == 3
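The test above exercises the --load-external-data path. For contrast, here is a hedged sketch of the plain case, where the model's weights are stored inline and no extra flag is needed; it reuses the ArgGroupTestHelper pattern and the ONNX_MODELS fixtures that appear throughout these examples, and keeps the assertion weak since the exact node count depends on the fixture.

    def test_basic_load(self):
        # Sketch only: same helper wiring as above, but without --load-external-data.
        arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
        model = ONNX_MODELS["identity"]
        arg_group.parse_args([model.path])
        model = arg_group.load_onnx()

        # The identity fixture should contain at least one node; the exact count is not assumed.
        assert len(model.graph.node) >= 1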
Example #3
 def __init__(self):
     super().__init__("model")
     self.subscribe_args(ModelArgs(model_required=True, inputs=None))
     self.subscribe_args(
         TfLoaderArgs(tftrt=False, artifacts=False, outputs=False))
     self.subscribe_args(OnnxLoaderArgs(outputs=False))
     self.subscribe_args(TrtLoaderArgs(config=False, outputs=False))
Example #4
 def __init__(self):
     super().__init__("run")
     self.subscribe_args(ModelArgs())
     self.subscribe_args(TfLoaderArgs(tftrt=True))
     self.subscribe_args(TfConfigArgs())
     self.subscribe_args(TfRunnerArgs())
     self.subscribe_args(Tf2OnnxLoaderArgs())
     self.subscribe_args(OnnxSaveArgs(output="save-onnx", short_opt=None))
     self.subscribe_args(OnnxShapeInferenceArgs())
     self.subscribe_args(OnnxLoaderArgs(save=True))
     self.subscribe_args(OnnxrtRunnerArgs())
     self.subscribe_args(PluginRefArgs())
     # We run calibration with the inference-time data.
     self.subscribe_args(TrtConfigArgs(random_data_calib_warning=False))
     self.subscribe_args(TrtPluginLoaderArgs())
     self.subscribe_args(TrtNetworkLoaderArgs())
     self.subscribe_args(
         TrtEngineSaveArgs(output="save-engine", short_opt=None))
     self.subscribe_args(TrtEngineLoaderArgs(save=True))
     self.subscribe_args(TrtRunnerArgs())
     self.subscribe_args(TrtLegacyArgs())
     self.subscribe_args(DataLoaderArgs())
     self.subscribe_args(ComparatorRunArgs())
     self.subscribe_args(ComparatorCompareArgs())
Example #5
    def test_basic(self):
        arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
        arg_group.parse_args([ONNX_MODELS["identity_identity"].path, "--onnx-outputs=identity_out_0"])
        model = arg_group.load_onnx()

        assert len(model.graph.output) == 1
        assert model.graph.output[0].name == "identity_out_0"
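The example above overrides the graph outputs with --onnx-outputs. A hedged sketch of the complementary case follows, with no output override, so the loaded model should keep its original outputs; only calls already shown in these examples are used.

    def test_no_output_override(self):
        arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
        arg_group.parse_args([ONNX_MODELS["identity_identity"].path])
        model = arg_group.load_onnx()

        # With no --onnx-outputs flag, the original output(s) remain.
        assert len(model.graph.output) >= 1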
Example #6
 def __init__(self):
     super().__init__("trt-network")
     self.subscribe_args(ModelArgs(model_required=False, inputs=None))
     self.subscribe_args(TfLoaderArgs(artifacts=False))
     self.subscribe_args(Tf2OnnxLoaderArgs())
     self.subscribe_args(OnnxLoaderArgs())
     self.subscribe_args(TrtPluginLoaderArgs())
     self.subscribe_args(TrtNetworkLoaderArgs())
Example #7
 def __init__(self):
     super().__init__("reduce")
     self.subscribe_args(ArtifactSorterArgs("polygraphy_debug.onnx", prefer_artifacts=False))
     self.subscribe_args(ModelArgs(model_required=True, inputs="--model-inputs", model_type="onnx"))
     self.subscribe_args(OnnxSaveArgs())
     self.subscribe_args(OnnxShapeInferenceArgs(default=True, enable_force_fallback=True))
     self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
     self.subscribe_args(DataLoaderArgs())  # For fallback shape inference
Example #8
 def __init__(self, name):
     super().__init__(name)
     self.subscribe_args(DataLoaderArgs())
     self.subscribe_args(ModelArgs(model_required=True))
     self.subscribe_args(OnnxLoaderArgs(outputs=False))
     self.subscribe_args(TrtLoaderArgs())
     self.subscribe_args(TrtRunnerArgs())
     self.subscribe_args(ComparatorRunArgs(iters=False, write=False))
     self.subscribe_args(ComparatorCompareArgs())
Example #9
    def test_external_data(self):
        model = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with tempfile.NamedTemporaryFile() as path, tempfile.NamedTemporaryFile() as data:
            arg_group.parse_args(["-o", path.name, "--save-external-data", data.name])
            arg_group.save_onnx(model)

            check_file_non_empty(path.name)
            check_file_non_empty(data.name)
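Example #9 saves weights to a separate external-data file. Here is a hedged sketch of the simpler single-file case, using only the -o flag and the helpers already shown above (onnx_from_path, ArgGroupTestHelper, check_file_non_empty):

    def test_save_single_file(self):
        model = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with tempfile.NamedTemporaryFile() as path:
            # No --save-external-data, so everything goes into one file.
            arg_group.parse_args(["-o", path.name])
            arg_group.save_onnx(model)

            check_file_non_empty(path.name)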
Example #10
 def __init__(self):
     super().__init__("model")
     self.subscribe_args(ModelArgs(model_required=True, inputs=None))
     self.subscribe_args(TfLoaderArgs(artifacts=False, outputs=False))
     self.subscribe_args(OnnxShapeInferenceArgs())
     self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
     self.subscribe_args(TrtPluginLoaderArgs())
     self.subscribe_args(TrtNetworkLoaderArgs(outputs=False))
     self.subscribe_args(TrtEngineLoaderArgs())
Example #11
def engine_loader_args():
    return ArgGroupTestHelper(TrtEngineLoaderArgs(),
                              deps=[
                                  ModelArgs(),
                                  OnnxLoaderArgs(),
                                  TrtConfigArgs(),
                                  TrtPluginLoaderArgs(),
                                  TrtNetworkLoaderArgs()
                              ])
Example #12
    def test_size_threshold(self):
        model = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with util.NamedTemporaryFile() as path, util.NamedTemporaryFile() as data:
            arg_group.parse_args(
                ["-o", path.name, "--save-external-data", data.name, "--external-data-size-threshold=1024"]
            )
            arg_group.save_onnx(model)

            assert is_file_non_empty(path.name)
            assert is_file_empty(data.name)
Example #13
 def __init__(self):
     super().__init__("extract")
     self.subscribe_args(
         ModelArgs(model_required=True,
                   inputs="--model-inputs",
                   model_type="onnx"))
     self.subscribe_args(DataLoaderArgs())
     self.subscribe_args(
         OnnxShapeInferenceArgs(default=False, enable_force_fallback=True))
     self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
     self.subscribe_args(OnnxSaveArgs(required=True))
Example #14
 def __init__(self):
     super().__init__("sanitize")
     self.subscribe_args(
         ModelArgs(model_required=True,
                   inputs="--override-inputs",
                   model_type="onnx"))
     self.subscribe_args(DataLoaderArgs())
     self.subscribe_args(
         OnnxShapeInferenceArgs(default=True, enable_force_fallback=True))
     self.subscribe_args(OnnxLoaderArgs(output_prefix=""))
     self.subscribe_args(OnnxSaveArgs(infer_shapes=True, required=True))
Example #15
    def test_shape_inference(self):
        # When using shape inference, we should load directly from the path
        arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs(), OnnxShapeInferenceArgs()])
        model = ONNX_MODELS["identity"]
        arg_group.parse_args([model.path, "--shape-inference"])

        assert arg_group.should_use_onnx_loader()

        script = Script()
        arg_group.add_onnx_loader(script)

        expected_loader = "InferShapes({:})".format(repr(model.path))
        assert expected_loader in str(script)
Example #16
 def __init__(self):
     super().__init__("convert")
     self.subscribe_args(ModelArgs(model_required=True))
     self.subscribe_args(TfLoaderArgs(artifacts=False))
     self.subscribe_args(Tf2OnnxLoaderArgs())
     self.subscribe_args(OnnxShapeInferenceArgs())
     self.subscribe_args(OnnxLoaderArgs())
     self.subscribe_args(OnnxSaveArgs(output=False))
     self.subscribe_args(DataLoaderArgs())  # For int8 calibration
     self.subscribe_args(TrtConfigArgs())
     self.subscribe_args(TrtPluginLoaderArgs())
     self.subscribe_args(TrtNetworkLoaderArgs())
     self.subscribe_args(TrtEngineLoaderArgs())
     self.subscribe_args(TrtEngineSaveArgs(output=False))
Example #17
 def __init__(self):
     super().__init__("extract")
     self.subscribe_args(
         ModelArgs(
             model_required=True,
             inputs="--model-inputs",
             model_type="onnx",
             inputs_doc="Input shapes to use when generating data to run fallback shape inference. "
             "Has no effect if fallback shape inference is not run",
         )
     )
     self.subscribe_args(DataLoaderArgs())
     self.subscribe_args(OnnxShapeInferenceArgs(default=False, enable_force_fallback=True))
     self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
     self.subscribe_args(OnnxSaveArgs(required=True))
Example #18
    def test_load_network(self):
        arg_group = ArgGroupTestHelper(
            TrtNetworkLoaderArgs(),
            deps=[ModelArgs(),
                  OnnxLoaderArgs(),
                  TrtPluginLoaderArgs()])
        arg_group.parse_args([
            ONNX_MODELS["identity_identity"].path,
            "--trt-outputs=identity_out_0"
        ])

        builder, network, parser = arg_group.load_network()
        with builder, network:
            assert network.num_outputs == 1
            assert network.get_output(0).name == "identity_out_0"
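Example #18 overrides the network outputs with --trt-outputs. Below is a hedged sketch of the default case, with no override, where the parsed network is expected to expose whatever outputs the ONNX model already defines; it reuses only the calls shown above.

    def test_load_network_default_outputs(self):
        arg_group = ArgGroupTestHelper(
            TrtNetworkLoaderArgs(),
            deps=[ModelArgs(), OnnxLoaderArgs(), TrtPluginLoaderArgs()])
        arg_group.parse_args([ONNX_MODELS["identity_identity"].path])

        builder, network, parser = arg_group.load_network()
        with builder, network:
            # Without --trt-outputs, at least the model's own output should be present.
            assert network.num_outputs >= 1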
Example #19
 def __init__(self):
     super().__init__("capability")
     self.subscribe_args(
         ModelArgs(model_required=True, inputs=None, model_type="onnx"))
     self.subscribe_args(OnnxShapeInferenceArgs(default=True))
     self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
     # Disallow ext data path since we're writing multiple models - otherwise, it'll be clobbered each time.
     self.subscribe_args(
         OnnxSaveArgs(
             allow_ext_data_path=False,
             custom_help=
             "Directory to write out supported and unsupported subgraphs. "
             "Defaults to 'polygraphy_capability_dumps' in the current directory",
             default_output_path="polygraphy_capability_dumps",
         ))
Example #20
 def __init__(self, name, strict_types_default=None, prefer_artifacts=True):
     super().__init__(name)
     self.subscribe_args(
         ArtifactSorterArgs("polygraphy_debug.engine",
                            prefer_artifacts=prefer_artifacts))
     self.subscribe_args(ModelArgs(model_required=True, inputs=None))
     self.subscribe_args(OnnxShapeInferenceArgs())
     self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
     self.subscribe_args(DataLoaderArgs())  # For int8 calibration
     self.subscribe_args(
         TrtConfigArgs(strict_types_default=strict_types_default))
     self.subscribe_args(TrtPluginLoaderArgs())
     self.subscribe_args(TrtNetworkLoaderArgs())
     self.subscribe_args(TrtEngineLoaderArgs())
     self.subscribe_args(TrtEngineSaveArgs(output=False))
Example #21
    def test_shape_inference_ext_data(self):
        arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs(), OnnxShapeInferenceArgs()])
        model = ONNX_MODELS["ext_weights"]
        arg_group.parse_args([model.path, "--external-data-dir", model.ext_data, "--shape-inference"])

        assert arg_group.should_use_onnx_loader()

        script = Script()
        arg_group.add_onnx_loader(script)

        expected_loader = "InferShapes({:}, external_data_dir={:})".format(repr(model.path), repr(model.ext_data))
        assert expected_loader in str(script)

        model = arg_group.load_onnx()
        _check_ext_weights_model(model)
Example #22
 def __init__(self):
     super().__init__("run")
     self.subscribe_args(ModelArgs())
     self.subscribe_args(TfLoaderArgs())
     self.subscribe_args(TfConfigArgs())
     self.subscribe_args(TfRunnerArgs())
     self.subscribe_args(Tf2OnnxLoaderArgs())
     self.subscribe_args(OnnxLoaderArgs())
     self.subscribe_args(OnnxrtRunnerArgs())
     self.subscribe_args(OnnxtfRunnerArgs())
     self.subscribe_args(TrtLoaderArgs(network_api=True))
     self.subscribe_args(TrtRunnerArgs())
     self.subscribe_args(TrtLegacyArgs())
     self.subscribe_args(DataLoaderArgs())
     self.subscribe_args(ComparatorRunArgs())
     self.subscribe_args(ComparatorCompareArgs())
Example #23
    def test_no_all_tensors_to_one_file(self):
        model = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with tempfile.TemporaryDirectory() as outdir:
            path = os.path.join(outdir, "model.onnx")
            arg_group.parse_args(
                [
                    "-o",
                    path,
                    "--save-external-data",
                    "--external-data-size-threshold=0",
                    "--no-save-all-tensors-to-one-file",
                ]
            )
            arg_group.save_onnx(model)

            assert is_file_non_empty(path)
            outfiles = glob.glob(os.path.join(outdir, "*"))
            assert len(outfiles) == 4
Example #24
 def __init__(self):
     super().__init__("run")
     self.subscribe_args(ModelArgs())
     self.subscribe_args(TfLoaderArgs(tftrt=True))
     self.subscribe_args(TfConfigArgs())
     self.subscribe_args(TfRunnerArgs())
     self.subscribe_args(Tf2OnnxLoaderArgs())
     self.subscribe_args(OnnxSaveArgs(output="save-onnx", short_opt=None))
     self.subscribe_args(OnnxShapeInferenceArgs())
     self.subscribe_args(OnnxLoaderArgs(save=True))
     self.subscribe_args(OnnxrtRunnerArgs())
     self.subscribe_args(TrtConfigArgs())
     self.subscribe_args(TrtPluginLoaderArgs())
     self.subscribe_args(TrtNetworkLoaderArgs())
     self.subscribe_args(
         TrtEngineSaveArgs(output="save-engine", short_opt=None))
     self.subscribe_args(TrtEngineLoaderArgs(save=True))
     self.subscribe_args(TrtRunnerArgs())
     self.subscribe_args(TrtLegacyArgs())
     self.subscribe_args(DataLoaderArgs())
     self.subscribe_args(ComparatorRunArgs())
     self.subscribe_args(ComparatorCompareArgs())
Example #25
 def test_defaults(self):
     arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
     arg_group.parse_args([])
     assert arg_group.size_threshold is None
Example #26
 def test_external_data(self):
     arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
     model = ONNX_MODELS["ext_weights"]
     arg_group.parse_args([model.path, "--external-data-dir", model.ext_data])
     model = arg_group.load_onnx()
     _check_ext_weights_model(model)
Example #27
 def __init__(self, name, inputs=None, data=False, shape_inference_default=None):
     super().__init__(name)
     self.subscribe_args(ModelArgs(model_required=True, inputs=inputs, model_type="onnx"))
     self.subscribe_args(OnnxLoaderArgs(write=False, outputs=False, shape_inference_default=shape_inference_default))
     if data:
         self.subscribe_args(DataLoaderArgs())
Example #28
 def test_size_threshold_parsing(self, arg, expected):
     arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
     arg_group.parse_args(["--external-data-size-threshold", arg])
     assert arg_group.size_threshold == expected
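Example #28 takes (arg, expected) parameters, so it is presumably driven by pytest.mark.parametrize. The sketch below shows one plausible parametrization; the concrete cases are illustrative assumptions and stick to plain byte counts so nothing is guessed about suffix parsing (requires pytest to be imported in the test module).

 # Hedged sketch (not from the source): one way the method above could be parametrized.
 @pytest.mark.parametrize("arg, expected", [("0", 0), ("1024", 1024)])
 def test_size_threshold_parsing(self, arg, expected):
     arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
     arg_group.parse_args(["--external-data-size-threshold", arg])
     assert arg_group.size_threshold == expected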