Esempio n. 1
0
    def run(self, args):
        """Convert the loaded model to the format requested via --convert-to.

        If --convert-to is absent, the target format is inferred from the
        output file extension; an unknown extension is a fatal error.
        """
        if args.convert_to:
            # Explicit target format requested on the command line.
            CONVERT_TO_MODEL_TYPE_MAPPING = {"onnx": "onnx", "trt": "engine"}
            convert_type = ModelArgs.ModelType(
                CONVERT_TO_MODEL_TYPE_MAPPING[args.convert_to])
        else:
            # Fall back to inferring the target format from the output extension.
            _, ext = os.path.splitext(args.output)
            if ext not in ModelArgs.EXT_MODEL_TYPE_MAPPING:
                G_LOGGER.exit(
                    "Could not automatically determine model type based on output path: {:}\n"
                    "Please specify the desired output format with --convert-to"
                    .format(args.output))
            convert_type = ModelArgs.ModelType(
                ModelArgs.EXT_MODEL_TYPE_MAPPING[ext])

        if convert_type.is_onnx():
            onnx_model = self.arg_groups[OnnxLoaderArgs].load_onnx()
            if args.fp_to_fp16:
                onnx_model = onnx_backend.convert_to_fp16(onnx_model)
            self.arg_groups[OnnxSaveArgs].save_onnx(onnx_model, args.output)
        elif convert_type.is_trt():
            with self.arg_groups[TrtEngineLoaderArgs].build_engine() as engine:
                self.arg_groups[TrtEngineSaveArgs].save_engine(
                    engine, args.output)
        else:
            G_LOGGER.exit(
                "Cannot convert to model type: {:}".format(convert_type))
Esempio n. 2
0
 def __init__(self):
     """Set up the argument groups for the `insert` subcommand."""
     super().__init__("insert")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             OnnxNodeArgs(),
             ModelArgs(model_required=True, inputs="--model-inputs", model_type="onnx"),
             OnnxShapeInferenceArgs(),
             OnnxLoaderArgs(output_prefix=None),
             OnnxSaveArgs(infer_shapes=True, required=True),
     ):
         self.subscribe_args(group)
Esempio n. 3
0
    def test_load_graph(self):
        """A frozen TF model should load as a tf.Graph with the expected output."""
        helper = ArgGroupTestHelper(TfLoaderArgs(), deps=[ModelArgs()])
        helper.parse_args([TF_MODELS["identity"].path, "--model-type=frozen"])
        loaded_graph, graph_outputs = helper.load_graph()

        assert isinstance(loaded_graph, tf.Graph)
        assert graph_outputs == ["Identity_2:0"]
Esempio n. 4
0
    def test_basic(self):
        """--onnx-outputs should restrict the loaded graph to the named output."""
        helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
        helper.parse_args([ONNX_MODELS["identity_identity"].path, "--onnx-outputs=identity_out_0"])
        loaded = helper.load_onnx()

        graph_outputs = loaded.graph.output
        assert len(graph_outputs) == 1
        assert graph_outputs[0].name == "identity_out_0"
Esempio n. 5
0
    def test_external_data(self):
        """A model with external weights should load when --load-external-data is given."""
        helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
        ext_model = ONNX_MODELS["ext_weights"]
        helper.parse_args([ext_model.path, "--load-external-data", ext_model.ext_data])
        loaded = helper.load_onnx()

        assert len(loaded.graph.node) == 3
Esempio n. 6
0
 def __init__(self):
     """Set up the argument groups for the `model` subcommand."""
     super().__init__("model")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ModelArgs(model_required=True, inputs=None),
             TfLoaderArgs(tftrt=False, artifacts=False, outputs=False),
             OnnxLoaderArgs(outputs=False),
             TrtLoaderArgs(config=False, outputs=False),
     ):
         self.subscribe_args(group)
Esempio n. 7
0
 def __init__(self):
     """Set up the argument groups for the `run` subcommand."""
     super().__init__("run")
     # Subscribe every loader/runner/comparator group, preserving the original order.
     for group in (
             ModelArgs(),
             TfLoaderArgs(tftrt=True),
             TfConfigArgs(),
             TfRunnerArgs(),
             Tf2OnnxLoaderArgs(),
             OnnxSaveArgs(output="save-onnx", short_opt=None),
             OnnxShapeInferenceArgs(),
             OnnxLoaderArgs(save=True),
             OnnxrtRunnerArgs(),
             PluginRefArgs(),
             # We run calibration with the inference-time data
             TrtConfigArgs(random_data_calib_warning=False),
             TrtPluginLoaderArgs(),
             TrtNetworkLoaderArgs(),
             TrtEngineSaveArgs(output="save-engine", short_opt=None),
             TrtEngineLoaderArgs(save=True),
             TrtRunnerArgs(),
             TrtLegacyArgs(),
             DataLoaderArgs(),
             ComparatorRunArgs(),
             ComparatorCompareArgs(),
     ):
         self.subscribe_args(group)
Esempio n. 8
0
 def __init__(self):
     """Set up the argument groups for the `trt-network` subcommand."""
     super().__init__("trt-network")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ModelArgs(model_required=False, inputs=None),
             TfLoaderArgs(artifacts=False),
             Tf2OnnxLoaderArgs(),
             OnnxLoaderArgs(),
             TrtPluginLoaderArgs(),
             TrtNetworkLoaderArgs(),
     ):
         self.subscribe_args(group)
Esempio n. 9
0
 def __init__(self):
     """Set up the argument groups for the `reduce` subcommand."""
     super().__init__("reduce")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ArtifactSorterArgs("polygraphy_debug.onnx", prefer_artifacts=False),
             ModelArgs(model_required=True, inputs="--model-inputs", model_type="onnx"),
             OnnxSaveArgs(),
             OnnxShapeInferenceArgs(default=True, enable_force_fallback=True),
             OnnxLoaderArgs(output_prefix=None),
             DataLoaderArgs(),  # For fallback shape inference
     ):
         self.subscribe_args(group)
Esempio n. 10
0
    def test_input_metadata(self):
        """--input-shapes should control the shapes of the generated feed_dict arrays."""
        helper = ArgGroupTestHelper(DataLoaderArgs(), deps=[ModelArgs()])
        helper.parse_args(["--input-shapes", "test0:[1,1,1]", "test1:[2,32,2]"])
        loader = helper.get_data_loader()

        for feed_dict in loader:
            assert feed_dict["test0"].shape == (1, 1, 1)
            assert feed_dict["test1"].shape == (2, 32, 2)
Esempio n. 11
0
 def __init__(self, name):
     """Set up the argument groups shared by subcommands named *name*."""
     super().__init__(name)
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             DataLoaderArgs(),
             ModelArgs(model_required=True),
             OnnxLoaderArgs(outputs=False),
             TrtLoaderArgs(),
             TrtRunnerArgs(),
             ComparatorRunArgs(iters=False, write=False),
             ComparatorCompareArgs(),
     ):
         self.subscribe_args(group)
Esempio n. 12
0
    def test_override_input_metadata(self):
        """User-supplied metadata should determine shapes when no CLI shapes are given."""
        helper = ArgGroupTestHelper(DataLoaderArgs(), deps=[ModelArgs()])
        helper.parse_args([])
        metadata = TensorMetadata().add("test0", dtype=np.float32, shape=(4, 4))
        loader = helper.get_data_loader(user_input_metadata=metadata)

        for feed_dict in loader:
            assert feed_dict["test0"].shape == (4, 4)
Esempio n. 13
0
 def __init__(self):
     """Set up the argument groups for the `model` subcommand."""
     super().__init__("model")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ModelArgs(model_required=True, inputs=None),
             TfLoaderArgs(artifacts=False, outputs=False),
             OnnxShapeInferenceArgs(),
             OnnxLoaderArgs(output_prefix=None),
             TrtPluginLoaderArgs(),
             TrtNetworkLoaderArgs(outputs=False),
             TrtEngineLoaderArgs(),
     ):
         self.subscribe_args(group)
Esempio n. 14
0
    def test_external_data(self):
        """--save-external-data should write both the model file and the data file."""
        loaded = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        helper = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with tempfile.NamedTemporaryFile() as path, tempfile.NamedTemporaryFile() as data:
            helper.parse_args(["-o", path.name, "--save-external-data", data.name])
            helper.save_onnx(loaded)

            check_file_non_empty(path.name)
            check_file_non_empty(data.name)
Esempio n. 15
0
def engine_loader_args():
    """Build a TrtEngineLoaderArgs test helper wired up with its dependency groups."""
    dependencies = [
        ModelArgs(),
        OnnxLoaderArgs(),
        TrtConfigArgs(),
        TrtPluginLoaderArgs(),
        TrtNetworkLoaderArgs(),
    ]
    return ArgGroupTestHelper(TrtEngineLoaderArgs(), deps=dependencies)
Esempio n. 16
0
    def test_size_threshold(self):
        """Tensors under --external-data-size-threshold stay inline, leaving the data file empty."""
        loaded = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        helper = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with util.NamedTemporaryFile() as path, util.NamedTemporaryFile() as data:
            cli_args = ["-o", path.name, "--save-external-data", data.name,
                        "--external-data-size-threshold=1024"]
            helper.parse_args(cli_args)
            helper.save_onnx(loaded)

            assert is_file_non_empty(path.name)
            assert is_file_empty(data.name)
Esempio n. 17
0
 def __init__(self):
     """Set up the argument groups for the `extract` subcommand."""
     super().__init__("extract")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ModelArgs(model_required=True,
                       inputs="--model-inputs",
                       model_type="onnx"),
             DataLoaderArgs(),
             OnnxShapeInferenceArgs(default=False, enable_force_fallback=True),
             OnnxLoaderArgs(output_prefix=None),
             OnnxSaveArgs(required=True),
     ):
         self.subscribe_args(group)
Esempio n. 18
0
 def __init__(self):
     """Set up the argument groups for the `sanitize` subcommand."""
     super().__init__("sanitize")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ModelArgs(model_required=True,
                       inputs="--override-inputs",
                       model_type="onnx"),
             DataLoaderArgs(),
             OnnxShapeInferenceArgs(default=True, enable_force_fallback=True),
             OnnxLoaderArgs(output_prefix=""),
             OnnxSaveArgs(infer_shapes=True, required=True),
     ):
         self.subscribe_args(group)
Esempio n. 19
0
    def test_shape_inference(self):
        """With --shape-inference, the generated loader should infer shapes directly from the path."""
        helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs(), OnnxShapeInferenceArgs()])
        onnx_model = ONNX_MODELS["identity"]
        helper.parse_args([onnx_model.path, "--shape-inference"])

        assert helper.should_use_onnx_loader()

        loader_script = Script()
        helper.add_onnx_loader(loader_script)

        expected_loader = "InferShapes({:})".format(repr(onnx_model.path))
        assert expected_loader in str(loader_script)
Esempio n. 20
0
 def __init__(self):
     """Set up the argument groups for the `convert` subcommand."""
     super().__init__("convert")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ModelArgs(model_required=True),
             TfLoaderArgs(artifacts=False),
             Tf2OnnxLoaderArgs(),
             OnnxShapeInferenceArgs(),
             OnnxLoaderArgs(),
             OnnxSaveArgs(output=False),
             DataLoaderArgs(),  # For int8 calibration
             TrtConfigArgs(),
             TrtPluginLoaderArgs(),
             TrtNetworkLoaderArgs(),
             TrtEngineLoaderArgs(),
             TrtEngineSaveArgs(output=False),
     ):
         self.subscribe_args(group)
Esempio n. 21
0
 def __init__(self):
     """Set up the argument groups for the `extract` subcommand."""
     super().__init__("extract")
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ModelArgs(
                 model_required=True,
                 inputs="--model-inputs",
                 model_type="onnx",
                 inputs_doc="Input shapes to use when generating data to run fallback shape inference. "
                 "Has no effect if fallback shape inference is not run",
             ),
             DataLoaderArgs(),
             OnnxShapeInferenceArgs(default=False, enable_force_fallback=True),
             OnnxLoaderArgs(output_prefix=None),
             OnnxSaveArgs(required=True),
     ):
         self.subscribe_args(group)
Esempio n. 22
0
    def test_load_network(self):
        """--trt-outputs should mark only the named tensor as a network output."""
        helper = ArgGroupTestHelper(
            TrtNetworkLoaderArgs(),
            deps=[ModelArgs(), OnnxLoaderArgs(), TrtPluginLoaderArgs()])
        helper.parse_args(
            [ONNX_MODELS["identity_identity"].path, "--trt-outputs=identity_out_0"])

        builder, network, parser = helper.load_network()
        with builder, network:
            assert network.num_outputs == 1
            assert network.get_output(0).name == "identity_out_0"
Esempio n. 23
0
 def __init__(self):
     """Set up the argument groups for the `capability` subcommand."""
     super().__init__("capability")
     self.subscribe_args(
         ModelArgs(model_required=True, inputs=None, model_type="onnx"))
     self.subscribe_args(OnnxShapeInferenceArgs(default=True))
     self.subscribe_args(OnnxLoaderArgs(output_prefix=None))
     # Disallow ext data path since we're writing multiple models - otherwise, it'll be clobbered each time.
     save_args = OnnxSaveArgs(
         allow_ext_data_path=False,
         custom_help="Directory to write out supported and unsupported subgraphs. "
         "Defaults to 'polygraphy_capability_dumps' in the current directory",
         default_output_path="polygraphy_capability_dumps",
     )
     self.subscribe_args(save_args)
Esempio n. 24
0
 def __init__(self, name, strict_types_default=None, prefer_artifacts=True):
     """Set up the argument groups shared by debug-style subcommands named *name*."""
     super().__init__(name)
     # Subscribe each argument group, preserving the original registration order.
     for group in (
             ArtifactSorterArgs("polygraphy_debug.engine",
                                prefer_artifacts=prefer_artifacts),
             ModelArgs(model_required=True, inputs=None),
             OnnxShapeInferenceArgs(),
             OnnxLoaderArgs(output_prefix=None),
             DataLoaderArgs(),  # For int8 calibration
             TrtConfigArgs(strict_types_default=strict_types_default),
             TrtPluginLoaderArgs(),
             TrtNetworkLoaderArgs(),
             TrtEngineLoaderArgs(),
             TrtEngineSaveArgs(output=False),
     ):
         self.subscribe_args(group)
Esempio n. 25
0
    def test_shape_inference_ext_data(self):
        """Shape inference should pass the external data directory through to InferShapes."""
        helper = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs(), OnnxShapeInferenceArgs()])
        ext_model = ONNX_MODELS["ext_weights"]
        helper.parse_args([ext_model.path, "--external-data-dir", ext_model.ext_data, "--shape-inference"])

        assert helper.should_use_onnx_loader()

        loader_script = Script()
        helper.add_onnx_loader(loader_script)

        expected_loader = "InferShapes({:}, external_data_dir={:})".format(
            repr(ext_model.path), repr(ext_model.ext_data))
        assert expected_loader in str(loader_script)

        loaded = helper.load_onnx()
        _check_ext_weights_model(loaded)
Esempio n. 26
0
 def __init__(self):
     """Set up the argument groups for the `run` subcommand."""
     super().__init__("run")
     # Subscribe every loader/runner/comparator group, preserving the original order.
     for group in (
             ModelArgs(),
             TfLoaderArgs(),
             TfConfigArgs(),
             TfRunnerArgs(),
             Tf2OnnxLoaderArgs(),
             OnnxLoaderArgs(),
             OnnxrtRunnerArgs(),
             OnnxtfRunnerArgs(),
             TrtLoaderArgs(network_api=True),
             TrtRunnerArgs(),
             TrtLegacyArgs(),
             DataLoaderArgs(),
             ComparatorRunArgs(),
             ComparatorCompareArgs(),
     ):
         self.subscribe_args(group)
Esempio n. 27
0
    def test_no_all_tensors_to_one_file(self):
        """With --no-save-all-tensors-to-one-file, tensors are written to separate data files."""
        loaded = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        helper = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with tempfile.TemporaryDirectory() as outdir:
            out_path = os.path.join(outdir, "model.onnx")
            cli_args = [
                "-o",
                out_path,
                "--save-external-data",
                "--external-data-size-threshold=0",
                "--no-save-all-tensors-to-one-file",
            ]
            helper.parse_args(cli_args)
            helper.save_onnx(loaded)

            assert is_file_non_empty(out_path)
            outfiles = glob.glob(os.path.join(outdir, "*"))
            assert len(outfiles) == 4
Esempio n. 28
0
 def __init__(self):
     """Set up the argument groups for the `run` subcommand."""
     super().__init__("run")
     # Subscribe every loader/runner/comparator group, preserving the original order.
     for group in (
             ModelArgs(),
             TfLoaderArgs(tftrt=True),
             TfConfigArgs(),
             TfRunnerArgs(),
             Tf2OnnxLoaderArgs(),
             OnnxSaveArgs(output="save-onnx", short_opt=None),
             OnnxShapeInferenceArgs(),
             OnnxLoaderArgs(save=True),
             OnnxrtRunnerArgs(),
             TrtConfigArgs(),
             TrtPluginLoaderArgs(),
             TrtNetworkLoaderArgs(),
             TrtEngineSaveArgs(output="save-engine", short_opt=None),
             TrtEngineLoaderArgs(save=True),
             TrtRunnerArgs(),
             TrtLegacyArgs(),
             DataLoaderArgs(),
             ComparatorRunArgs(),
             ComparatorCompareArgs(),
     ):
         self.subscribe_args(group)
Esempio n. 29
0
 def __init__(self):
     """Set up the argument groups for the `trt-config` subcommand."""
     super().__init__("trt-config")
     # Subscribe each argument group, preserving the original registration order.
     for group in (ModelArgs(model_required=False), DataLoaderArgs(), TrtConfigArgs()):
         self.subscribe_args(group)
Esempio n. 30
0
    def test_fixed_model_type(self):
        """An explicit model_type should win over what the file extension suggests."""
        helper = ArgGroupTestHelper(ModelArgs(model_type="onnx"))
        helper.parse_args(["model.pb"])

        assert helper.model_type.is_onnx()