Code Example #1
    def test_trt_network_config_script_to_engine(self):
        script = dedent("""
        from polygraphy.backend.trt import CreateNetwork, CreateConfig
        from polygraphy import func
        import tensorrt as trt

        @func.extend(CreateNetwork())
        def my_load_network(builder, network):
            inp = network.add_input("input", dtype=trt.float32, shape=(1, 1))
            out = network.add_identity(inp).get_output(0)
            network.mark_output(out)

        @func.extend(CreateConfig())
        def load_config(config):
            config.set_flag(trt.BuilderFlag.FP16)
        """)

        with util.NamedTemporaryFile(
                "w+",
                suffix=".py") as f, util.NamedTemporaryFile() as outmodel:
            f.write(script)
            f.flush()

            run_polygraphy_convert([
                f.name,
                "--model-type=trt-network-script",
                "--trt-network-func-name=my_load_network",
                "--trt-config-script",
                f.name,
                "--convert-to=trt",
                "-o",
                outmodel.name,
            ])
            self.check_engine(outmodel.name)
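For context, the same engine can also be built in-process by composing Polygraphy's loader API rather than going through polygraphy convert. The sketch below is only an illustration: the file name "my_network.py" is hypothetical, and it assumes InvokeFromScript (see Code Example #7) and EngineFromNetwork compose as described.

# Hedged sketch, not part of the test above: build the same engine through the Python API.
# Assumes the script from the test has been saved to disk as "my_network.py".
from polygraphy.backend.common import InvokeFromScript
from polygraphy.backend.trt import EngineFromNetwork

load_network = InvokeFromScript("my_network.py", "my_load_network")  # () -> (builder, network)
load_config = InvokeFromScript("my_network.py", "load_config")       # (builder, network) -> config
build_engine = EngineFromNetwork(load_network, config=load_config)   # lazy loader

engine = build_engine()  # nothing is built until the loader is called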
Code Example #2
def replay(request):
    """
    Returns:
        Tuple[FakeAlgorithmContext, Algorithm, FakeAlgorithm,
              Union[str, TacticReplayData], Union[str, TacticReplayData]]:
                This fixture returns 5 things:
                1. A fake TensorRT algorithm context
                2. A Polygraphy Algorithm instance
                3. A fake TensorRT algorithm (with the same information as (2))
                4. Input tactic replay data, populated with the Polygraphy Algorithm from (2),
                    either as a ``TacticReplayData`` instance or as a path.
                5. Output tactic replay data, initially empty, either as a ``TacticReplayData``
                    instance or as a path.
    """
    jsonify = request.param

    name = "node_of_y"
    context = fake_context(name)

    trt_algo = fake_algo()
    poly_algo = Algorithm.from_trt(context, trt_algo)

    in_replay_data = TacticReplayData().add(name, poly_algo)
    out_replay_data = TacticReplayData()
    if jsonify:
        inpath = util.NamedTemporaryFile("w")
        in_replay_data.save(inpath.name)
        in_replay_data = inpath.name

        outpath = util.NamedTemporaryFile("r")
        out_replay_data = outpath.name

    yield context, poly_algo, trt_algo, in_replay_data, out_replay_data
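The line jsonify = request.param implies that this fixture is parametrized; the decorator itself is not part of the snippet. A plausible (assumed) form is sketched below: each dependent test runs once with in-memory TacticReplayData objects and once with the replay data serialized to JSON files.

import pytest

# Hypothetical parametrization for the fixture above; parameter values and ids are assumptions.
@pytest.fixture(params=[False, True], ids=["in-memory", "jsonified"])
def replay(request):
    jsonify = request.param
    ...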
Code Example #3
File: test_loader.py  Project: phongphuhanam/TensorRT
    def test_external_data(self):
        model = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with util.NamedTemporaryFile() as path, util.NamedTemporaryFile() as data:
            arg_group.parse_args(
                ["-o", path.name, "--save-external-data", data.name, "--external-data-size-threshold=0"]
            )
            arg_group.save_onnx(model)

            assert is_file_non_empty(path.name)
            assert is_file_non_empty(data.name)
Code Example #4
File: test_loader.py  Project: phongphuhanam/TensorRT
    def test_external_data(self):
        with util.NamedTemporaryFile() as path, util.NamedTemporaryFile() as data:
            model = OnnxFromPath(ONNX_MODELS["const_foldable"].path)
            loader = SaveOnnx(model, path.name, external_data_path=data.name, size_threshold=0)
            loader()
            assert is_file_non_empty(path.name)
            assert is_file_non_empty(data.name)
Code Example #5
File: test_config.py  Project: phongphuhanam/TensorRT
    def test_config_script(self):
        arg_group = ArgGroupTestHelper(TrtConfigArgs())

        with util.NamedTemporaryFile("w+", suffix=".py") as f:
            f.write(
                dedent(
                    """
                from polygraphy.backend.trt import CreateConfig
                from polygraphy import func
                import tensorrt as trt

                @func.extend(CreateConfig())
                def my_load_config(config):
                    config.set_flag(trt.BuilderFlag.FP16)
            """
                )
            )
            f.flush()

            arg_group.parse_args(["--trt-config-script", f.name, "--trt-config-func-name=my_load_config"])
            assert arg_group.trt_config_script == f.name
            assert arg_group.trt_config_func_name == "my_load_config"

            builder, network = create_network()
            with builder, network, arg_group.create_config(builder, network) as config:
                assert isinstance(config, trt.IBuilderConfig)
                assert config.get_flag(trt.BuilderFlag.FP16)
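Note that create_network() used above is presumably Polygraphy's immediately evaluated counterpart to the CreateNetwork loader seen in Code Examples #1 and #7; the import is not shown in the snippet, so the sketch below is an assumption about where it comes from.

# Assumed import: Polygraphy pairs lazy loaders (CapWords) with immediately evaluated
# functional variants (snake_case).
from polygraphy.backend.trt import CreateNetwork, create_network

builder, network = create_network()   # evaluated immediately; returns (builder, network)

load_network = CreateNetwork()        # lazy: nothing is created until the loader is called
builder2, network2 = load_network()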
Code Example #6
    def test_tf2onnx(self):
        with util.NamedTemporaryFile(suffix=".onnx") as outmodel:
            run_polygraphy_convert([TF_MODELS["identity"].path, "--model-type=frozen", "-o", outmodel.name])
            assert onnx.load(outmodel.name)
Code Example #7
    def test_import_from_script(self, loader):
        script = dedent("""
        from polygraphy.backend.trt import CreateNetwork
        from polygraphy import func
        import tensorrt as trt

        @func.extend(CreateNetwork())
        def load_network(builder, network):
            inp = network.add_input("input", dtype=trt.float32, shape=(1, 1))
            out = network.add_identity(inp).get_output(0)
            network.mark_output(out)
        """)

        with util.NamedTemporaryFile("w+", suffix=".py") as f:
            f.write(script)
            f.flush()

            if loader == InvokeFromScript:
                load_network = loader(f.name, "load_network")
                builder, network = load_network()
            else:
                builder, network = loader(f.name, "load_network")
            with builder, network:
                assert isinstance(builder, trt.Builder)
                assert isinstance(network, trt.INetworkDefinition)
                assert network.num_layers == 1
                assert network.get_layer(0).type == trt.LayerType.IDENTITY
Code Example #8
    def test_data_loader_script(self):
        arg_group = ArgGroupTestHelper(DataLoaderArgs())

        with util.NamedTemporaryFile("w+", suffix=".py") as f:
            f.write(
                dedent(
                    """
                    import numpy as np

                    def my_load_data():
                        for _ in range(5):
                            yield {"inp": np.ones((3, 5), dtype=np.float32) * 6.4341}
                    """
                )
            )
            f.flush()

            arg_group.parse_args(["--data-loader-script", f.name, "--data-loader-func-name=my_load_data"])

            assert arg_group.data_loader_script == f.name
            assert arg_group.data_loader_func_name == "my_load_data"

            data_loader = arg_group.get_data_loader()
            data = list(data_loader)
            assert len(data) == 5
            assert all(np.all(d["inp"] == np.ones((3, 5), dtype=np.float32) * 6.4341) for d in data)
Code Example #9
    def test_tf_save_pb(self):
        with util.NamedTemporaryFile() as outpath:
            run_polygraphy_run(
                [TF_MODELS["identity"].path, "--tf", "--gpu-memory-fraction=0.5", "--save-pb", outpath.name]
            )
            assert is_file_non_empty(outpath.name)
Code Example #10
    def test_onnx_to_trt(self):
        with util.NamedTemporaryFile(suffix=".engine") as outmodel:
            run_polygraphy_convert([ONNX_MODELS["identity"].path, "--model-type=onnx", "-o", outmodel.name])
            self.check_engine(outmodel.name)
Code Example #11
    def test_tf_to_onnx_to_trt(self):
        with util.NamedTemporaryFile() as outmodel:
            run_polygraphy_convert(
                [TF_MODELS["identity"].path, "--model-type=frozen", "--convert-to=trt", "-o", outmodel.name]
            )
            self.check_engine(outmodel.name)
Code Example #12
    def test_onnx_rt_save_onnx(self):
        with util.NamedTemporaryFile() as outpath:
            run_polygraphy_run([ONNX_MODELS["identity"].path, "--onnxrt", "--save-onnx", outpath.name])
            assert is_file_non_empty(outpath.name)
            assert onnx.load(outpath.name)
Code Example #13
    def test_tf2onnx_save_onnx(self):
        with util.NamedTemporaryFile() as outpath:
            run_polygraphy_run(
                [TF_MODELS["identity"].path, "--onnxrt", "--model-type=frozen", "--save-onnx", outpath.name]
            )
            assert is_file_non_empty(outpath.name)
            assert onnx.load(outpath.name)
Code Example #14
    def test_serialized_engine_loader_from_lambda(self, identity_engine):
        with util.NamedTemporaryFile() as outpath:
            with open(outpath.name, "wb") as f, identity_engine.serialize() as buffer:
                f.write(buffer)

            loader = EngineFromBytes(lambda: open(outpath.name, "rb").read())
            with loader() as engine:
                assert isinstance(engine, trt.ICudaEngine)
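The lambda above is one way to defer reading the serialized engine until the loader runs. A common alternative, sketched here under the assumption that BytesFromPath is available in this Polygraphy version, is to compose EngineFromBytes with a file loader; the path is illustrative.

# Sketch: equivalent lazy composition using BytesFromPath instead of a lambda.
from polygraphy.backend.common import BytesFromPath
from polygraphy.backend.trt import EngineFromBytes

load_engine = EngineFromBytes(BytesFromPath("identity.engine"))  # hypothetical path
with load_engine() as engine:
    ...  # engine is a deserialized trt.ICudaEngine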
Code Example #15
    def test_save_load_engine(self):
        with util.NamedTemporaryFile() as outpath:
            run_polygraphy_run([ONNX_MODELS["identity"].path, "--trt", "--save-engine", outpath.name])
            assert is_file_non_empty(outpath.name)
            run_polygraphy_run(["--trt", outpath.name, "--model-type=engine"])
Code Example #16
    def test_no_opts(self):
        with util.NamedTemporaryFile("w+", suffix=".py") as template:
            run_polygraphy_template(["trt-config", "-o", template.name])

            builder, network = create_network()
            create_config = InvokeFromScript(template.name, "load_config")
            with builder, network, create_config(builder, network) as config:
                assert isinstance(config, trt.IBuilderConfig)
Code Example #17
File: test_runner.py  Project: phongphuhanam/TensorRT
    def test_save_timeline(self):
        model = TF_MODELS["identity"]
        with util.NamedTemporaryFile() as outpath:
            with TfRunner(
                SessionFromGraph(model.loader), allow_growth=True, save_timeline=outpath.name
            ) as runner:
                model.check_runner(runner)
                assert is_file_non_empty(outpath.name)
Code Example #18
    def test_no_shape_inference_if_has_metadata(self):
        with util.NamedTemporaryFile() as outmodel:
            status = run_polygraphy_surgeon(
                ["extract", ONNX_MODELS["identity_identity"].path, "-o", outmodel.name, "--inputs", "X:auto:auto"]
            )
            onnx_model_sanity_check(outmodel.name)
            assert not was_shape_inference_run(status)
Code Example #19
    def test_tf_save_timeline(self):
        with util.NamedTemporaryFile() as outpath:
            run_polygraphy_run(
                [TF_MODELS["identity"].path, "--tf", "--gpu-memory-fraction=0.5", "--save-timeline", outpath.name]
            )
            timelines = glob.glob(os.path.join(outpath.name, "*"))
            for timeline in timelines:
                assert is_file_non_empty(timeline)
Code Example #20
    def test_modify_onnx_outputs(self):
        with util.NamedTemporaryFile(suffix=".onnx") as outmodel:
            run_polygraphy_surgeon([
                "sanitize", ONNX_MODELS["identity_identity"].path, "-o",
                outmodel.name, "--outputs", "mark", "all"
            ])

            model = onnx.load(outmodel.name)
            assert len(model.graph.output) == 2
Code Example #21
    def test_override_shapes_no_clear_const_tensors_meta(self):
        with util.NamedTemporaryFile() as outmodel:
            run_polygraphy_surgeon(
                [
                    "sanitize",
                    ONNX_MODELS["const_foldable"].path,
                    "-o",
                    outmodel.name,
                    "--override-input-shapes=input:[1,3]",
                ]
            )
Code Example #22
File: test_logger.py  Project: phongphuhanam/TensorRT
    def test_log_file(self):
        logger = Logger()
        with util.NamedTemporaryFile("w+") as log_file:
            logger.log_file = log_file.name
            assert logger.log_file == log_file.name
            logger.info("Hello")

            log_file.seek(0)
            assert log_file.read() == "[I] Hello\n"
Code Example #23
File: test_config.py  Project: phongphuhanam/TensorRT
    def test_tactic_replay(self, trt_config_args):
        with util.NamedTemporaryFile(suffix=".json") as f:
            trt_config_args.parse_args(["--tactic-replay", f.name])
            builder, network = create_network()

            with builder, network, trt_config_args.create_config(builder, network=network) as config:
                recorder = config.algorithm_selector
                assert recorder.make_func == TacticRecorder
                assert recorder.path == f.name
Code Example #24
    def test_no_model_file(self):
        with util.NamedTemporaryFile("w+", suffix=".py") as template:
            run_polygraphy_template(["trt-network", "-o", template.name])

            load_network = InvokeFromScript(template.name, "load_network")
            builder, network = load_network()
            with builder, network:
                assert isinstance(builder, trt.Builder)
                assert isinstance(network, trt.INetworkDefinition)
Code Example #25
    def test_fp_to_fp16(self):
        with util.NamedTemporaryFile() as outmodel:
            run_polygraphy_convert(
                [ONNX_MODELS["identity_identity"].path, "--convert-to=onnx", "--fp-to-fp16", "-o", outmodel.name]
            )
            # 10 is onnx.TensorProto.FLOAT16, i.e. the intermediate tensor was converted to FP16.
            assert onnx.load(outmodel.name).graph.value_info[0].type.tensor_type.elem_type == 10
Code Example #26
    def test_save_load_inputs(self):
        with util.NamedTemporaryFile() as infile0, util.NamedTemporaryFile() as infile1:
            run_polygraphy_run(
                [ONNX_MODELS["identity"].path, "--onnxrt", "--save-input-data", infile0.name]
            )
            run_polygraphy_run(
                [
                    ONNX_MODELS["identity"].path,
                    "--onnxrt",
                    "--load-input-data",
                    infile0.name,
                    "--save-input-data",
                    infile1.name,
                ]
            )  # Copy
            run_polygraphy_run(
                [ONNX_MODELS["identity"].path, "--onnxrt", "--load-input-data", infile0.name, infile1.name]
            )
Code Example #27
File: test_data.py  Project: phongphuhanam/TensorRT
    def test_merge_inputs_outputs(self):
        with util.NamedTemporaryFile() as inps, util.NamedTemporaryFile(
        ) as outs, util.NamedTemporaryFile() as merged:
            run_polygraphy_run(
                [
                    ONNX_MODELS["identity"].path, "--onnxrt", "--save-inputs",
                    inps.name, "--save-outputs", outs.name
                ],
                disable_verbose=True,
            )

            run_polygraphy_data(
                ["to-input", inps.name, outs.name, "-o", merged.name])

            merged_data = util.load_json(merged.name)
            assert len(merged_data) == 1
            assert list(merged_data[0].keys()) == ["x", "y"]
            assert all(
                isinstance(val, np.ndarray) for val in merged_data[0].values())
Code Example #28
    def test_polygraphy_run_gen_script(self):
        with util.NamedTemporaryFile(mode="w") as f:
            run_polygraphy_run(["--gen-script={:}".format(f.name), ONNX_MODELS["identity"].path])
            with open(f.name, "r") as script:
                print(script.read())
            env = copy.deepcopy(os.environ)
            env.update({"PYTHONPATH": ROOT_DIR})
            check_subprocess(sp.run([sys.executable, f.name], env=env))
Code Example #29
    def test_int8_calibration_cache(self):
        with util.NamedTemporaryFile() as outpath:
            cmd = [ONNX_MODELS["identity"].path, "--trt", "--int8", "--calibration-cache", outpath.name]
            if mod.version(trt.__version__) >= mod.version("7.0"):
                cmd += ["--onnxrt"]
            run_polygraphy_run(cmd)
            assert is_file_non_empty(outpath.name)
Code Example #30
File: test_config.py  Project: phongphuhanam/TensorRT
    def test_tactics(self, trt_config_args, opt, cls):
        with util.NamedTemporaryFile("w+", suffix=".json") as f:
            if opt == "--load-tactics":
                TacticReplayData().save(f)

            trt_config_args.parse_args([opt, f.name])
            builder, network = create_network()
            with builder, network, trt_config_args.create_config(builder, network=network) as config:
                recorder = config.algorithm_selector
                assert recorder.make_func == cls
                assert recorder.path == f.name
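This test receives opt and cls as parameters, so it is presumably parametrized over the save/load tactic options; the decorator is not included in the snippet. The pairing sketched below is an assumption based on the flag names and on the TacticRecorder class referenced in Code Example #23.

import pytest
from polygraphy.backend.trt import TacticRecorder, TacticReplayer

# Hypothetical parametrization for the test above; the real decorator is not shown in the snippet.
@pytest.mark.parametrize(
    "opt, cls",
    [
        ("--save-tactics", TacticRecorder),   # record the tactics chosen during the build
        ("--load-tactics", TacticReplayer),   # replay previously recorded tactics
    ],
)
def test_tactics(self, trt_config_args, opt, cls):
    ...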