Example #1
    def add_onnx_loader(self, script, disable_outputs=None, suffix=None):
        if self.model_args.model_type == "onnx":
            script.add_import(imports=["OnnxFromPath"],
                              frm="polygraphy.backend.onnx")
            loader_str = Script.invoke("OnnxFromPath",
                                       self.model_args.model_file)
            loader_name = script.add_loader(loader_str,
                                            "load_onnx",
                                            suffix=suffix)
        else:
            if self.tf2onnx_loader_args is None:
                G_LOGGER.critical(
                    "Could not load: {:}. Is it an ONNX model?".format(
                        self.model_args.model_file))
            loader_name = self.tf2onnx_loader_args.add_to_script(script)

        modify_onnx_str = self._get_modify_onnx_str(
            script, loader_name, disable_outputs=disable_outputs)
        if modify_onnx_str is not None:
            loader_name = script.add_loader(modify_onnx_str, "modify_onnx")

        SAVE_ONNX = "SaveOnnx"
        save_onnx_str = Script.invoke(SAVE_ONNX,
                                      loader_name,
                                      path=self.save_onnx)
        if save_onnx_str != Script.invoke(SAVE_ONNX, loader_name):
            script.add_import(imports=[SAVE_ONNX],
                              frm="polygraphy.backend.onnx")
            loader_name = script.add_loader(save_onnx_str, "save_onnx")

        return loader_name
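
These examples share one builder pattern from Polygraphy's CLI tooling: each add_* helper registers the imports it needs with script.add_import(), appends a loader-construction line with script.add_loader() (which returns the generated variable name), and only adds an optional stage such as SaveOnnx or ModifyOnnx when Script.invoke() with the extra arguments differs from the bare invocation, i.e. when a non-default option was actually supplied. As a rough illustration, the fragment below is a minimal, hand-written sketch (file paths are hypothetical) of the kind of standalone script such a builder emits for a plain ONNX model when a save path is given:

# Hypothetical generated fragment; "model.onnx" and "saved.onnx" are placeholder paths.
from polygraphy.backend.onnx import OnnxFromPath, SaveOnnx

load_onnx = OnnxFromPath("model.onnx")               # from script.add_loader(..., "load_onnx")
save_onnx = SaveOnnx(load_onnx, path="saved.onnx")   # emitted only because a save path was supplied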
Example #2
def add_trt_runner(script, args, data_loader_name):
    script.add_import(imports=["TrtRunner"], frm="polygraphy.backend.trt")

    if args.model_type == "engine":
        loader_name = tool_util.add_trt_serialized_engine_loader(script, args)
    else:
        script.add_import(imports=["EngineFromNetwork"],
                          frm="polygraphy.backend.trt")
        loader_name = tool_util.add_trt_network_loader(script, args)
        config_loader_name = tool_util.add_trt_config_loader(
            script, args, data_loader_name=data_loader_name)
        loader_str = Script.invoke("EngineFromNetwork",
                                   loader_name,
                                   config=config_loader_name)
        loader_name = script.add_loader(loader_str, "build_engine")

    SAVE_ENGINE = "SaveEngine"
    save_engine = Script.invoke(SAVE_ENGINE,
                                loader_name,
                                path=args_util.get(args, "save_engine"))
    if save_engine != Script.invoke(SAVE_ENGINE, loader_name):
        script.add_import(imports=[SAVE_ENGINE], frm="polygraphy.backend.trt")
        loader_name = script.add_loader(save_engine, "save_engine")

    script.add_runner(Script.invoke("TrtRunner", loader_name))
Example #3
    def add_to_script(self, script, data_loader_name):
        script.add_import(imports=["TrtRunner"], frm="polygraphy.backend.trt")

        if self.model_args.model_type == "engine":
            loader_name = self.trt_loader_args.add_trt_serialized_engine_loader(
                script)
        else:
            script.add_import(imports=["EngineFromNetwork"],
                              frm="polygraphy.backend.trt")
            loader_name = self.trt_loader_args.add_trt_network_loader(script)
            config_loader_name = self.trt_loader_args.add_trt_config_loader(
                script, data_loader_name)
            loader_str = Script.invoke("EngineFromNetwork",
                                       loader_name,
                                       config=config_loader_name)
            loader_name = script.add_loader(loader_str, "build_engine")

        SAVE_ENGINE = "SaveEngine"
        save_engine = Script.invoke(SAVE_ENGINE,
                                    loader_name,
                                    path=self.save_engine)
        if save_engine != Script.invoke(SAVE_ENGINE, loader_name):
            script.add_import(imports=[SAVE_ENGINE],
                              frm="polygraphy.backend.trt")
            loader_name = script.add_loader(save_engine, "save_engine")

        runner_name = script.add_loader(
            Script.invoke("TrtRunner", loader_name), "trt_runner")
        script.add_runner(runner_name)
        return runner_name
Example #4
        def _make_data_loader(script):
            data_loader_name = Inline("data_loader")

            input_metadata_str = (Inline(repr(self.model_args.input_shapes))
                                  if self.model_args.input_shapes else None)
            if input_metadata_str:
                script.add_import(imports=["TensorMetadata"],
                                  frm="polygraphy.common")

            data_loader = Script.invoke_if_nondefault(
                "DataLoader",
                seed=self.seed,
                iterations=self.iterations,
                input_metadata=input_metadata_str,
                int_range=self.int_range,
                float_range=self.float_range)
            if data_loader is not None:
                script.add_import(imports=["DataLoader"],
                                  frm="polygraphy.comparator")
                script.append_prefix(
                    Script.format_str("\n# Inference Inputs Loader\n{:} = {:}",
                                      data_loader_name, Inline(data_loader)))
            else:
                data_loader_name = None
            return data_loader_name
Example #5
def add_onnx_loader(script, args, disable_outputs=None, suffix=None):
    if args_util.get(args, "model_type") == "onnx":
        script.add_import(imports=["OnnxFromPath"], frm="polygraphy.backend.onnx")
        loader_str = Script.invoke("OnnxFromPath", args_util.get(args, "model_file"))
        loader_name = script.add_loader(loader_str, "load_onnx", suffix=suffix)
    else:
        G_LOGGER.verbose("Attempting to load as a TensorFlow model, using TF2ONNX to convert to ONNX. "
                       "If this is not correct, please specify --model-type", mode=LogMode.ONCE)
        script.add_import(imports=["OnnxFromTfGraph"], frm="polygraphy.backend.onnx")
        loader_str = Script.invoke("OnnxFromTfGraph", add_tf_loader(script, args, disable_outputs=True, suffix=suffix),
                                opset=args_util.get(args, "opset"), fold_constant=False if args_util.get(args, "no_const_folding") else None)
        loader_name = script.add_loader(loader_str, "export_onnx_from_tf", suffix=suffix)

    modify_onnx_str = get_modify_onnx_str(script, args, loader_name, disable_outputs=disable_outputs)
    if modify_onnx_str is not None:
        loader_name = script.add_loader(modify_onnx_str, "modify_onnx")

    save_onnx = args_util.get(args, "save_onnx")
    SAVE_ONNX = "SaveOnnx"
    save_onnx_str = Script.invoke(SAVE_ONNX, loader_name, path=save_onnx)
    if save_onnx_str != Script.invoke(SAVE_ONNX, loader_name):
        script.add_import(imports=[SAVE_ONNX], frm="polygraphy.backend.onnx")
        loader_name = script.add_loader(save_onnx_str, "save_onnx")

    return loader_name
Example #6
    def add_to_script(self, script, data_loader_name):
        script.add_import(imports=["Comparator"], frm="polygraphy.comparator")
        script.add_import(imports=["sys"])

        RESULTS_VAR_NAME = Inline("results")

        comparator_run = Script.invoke("Comparator.run",
                                       script.get_runners(),
                                       warm_up=self.warm_up,
                                       data_loader=data_loader_name,
                                       use_subprocess=self.use_subprocess,
                                       save_inputs_path=self.save_inputs)
        script.append_suffix(
            Script.format_str("\n# Runner Execution\n{results} = {:}",
                              Inline(comparator_run),
                              results=RESULTS_VAR_NAME))

        if self.save_results:
            G_LOGGER.verbose("Will save runner results to: {:}".format(
                self.save_results))
            script.add_import(imports=["misc"], frm="polygraphy.util")
            script.append_suffix(
                Script.format_str(
                    "\n# Save results\nmisc.pickle_save({:}, {results})",
                    self.save_results,
                    results=RESULTS_VAR_NAME))

        return RESULTS_VAR_NAME
Example #7
def add_trt_config_loader(script, args, data_loader_name):
    profiles = []
    for (min_shape, opt_shape, max_shape) in args_util.get(args, "profiles"):
        profile_str = "Profile()"
        for name in min_shape.keys():
            profile_str += Script.format_str(".add({:}, min={:}, opt={:}, max={:})", name, min_shape[name], opt_shape[name], max_shape[name])
        profiles.append(Inline(profile_str))
    if profiles:
        script.add_import(imports=["Profile"], frm="polygraphy.backend.trt")
        sep = Inline("\n{:}".format(constants.TAB))
        profiles = Script.format_str("[{:}{:}\n]", sep, Inline((",{:}".format(sep)).join(profiles)))
        profile_name = script.add_loader(profiles, "profiles")
    else:
        profile_name = None

    calibrator = None
    if args_util.get(args, "int8"):
        script.add_import(imports=["DataLoader"], frm="polygraphy.comparator")
        script.add_import(imports=["Calibrator"], frm="polygraphy.backend.trt")
        calibrator = Script.invoke("Calibrator", data_loader=Inline(data_loader_name) if data_loader_name else Inline("DataLoader()"),
                                   cache=args_util.get(args, "calibration_cache"))

    config_loader_str = Script.invoke_if_nondefault("CreateTrtConfig", max_workspace_size=args_util.get(args, "workspace"), tf32=args_util.get(args, "tf32"),
                                                    fp16=args_util.get(args, "fp16"), int8=args_util.get(args, "int8"), strict_types=args_util.get(args, "strict_types"),
                                                    profiles=profile_name, calibrator=Inline(calibrator) if calibrator else None)
    if config_loader_str is not None:
        script.add_import(imports=["CreateConfig as CreateTrtConfig"], frm="polygraphy.backend.trt")
        config_loader_name = script.add_loader(config_loader_str, "create_trt_config")
    else:
        config_loader_name = None
    return config_loader_name
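
add_trt_config_loader only emits a CreateTrtConfig call when at least one builder option is non-default; optimization profiles are rendered as an inline list of chained Profile().add(...) calls, and INT8 mode pulls in a Calibrator fed by the data loader. A minimal sketch of the generated fragment (input name and shapes are made up for illustration) might look like:

# Hypothetical generated fragment for --fp16 --int8 with one optimization profile.
from polygraphy.backend.trt import Calibrator, CreateConfig as CreateTrtConfig, Profile
from polygraphy.comparator import DataLoader

profiles = [
    Profile().add("input", min=(1, 3, 224, 224), opt=(4, 3, 224, 224), max=(8, 3, 224, 224))
]
create_trt_config = CreateTrtConfig(fp16=True, int8=True, profiles=profiles,
                                    calibrator=Calibrator(data_loader=DataLoader()))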
Example #8
def add_onnxrt_runner(script, args):
    script.add_import(imports=["OnnxrtRunner"],
                      frm="polygraphy.backend.onnxrt")
    onnx_name = tool_util.add_serialized_onnx_loader(script, args)

    script.add_import(imports=["SessionFromOnnxBytes"],
                      frm="polygraphy.backend.onnxrt")
    loader_name = script.add_loader(
        Script.invoke("SessionFromOnnxBytes", onnx_name),
        "build_onnxrt_session")

    script.add_runner(Script.invoke("OnnxrtRunner", loader_name))
Example #9
    def add_trt_serialized_engine_loader(self, script):
        script.add_import(imports=["EngineFromBytes"],
                          frm="polygraphy.backend.trt")
        script.add_import(imports=["BytesFromPath"],
                          frm="polygraphy.backend.common")

        load_engine = script.add_loader(
            Script.invoke("BytesFromPath", self.model_args.model_file),
            "load_engine")
        return script.add_loader(
            Script.invoke("EngineFromBytes",
                          self._wrap_if_plugins(script, load_engine)),
            "deserialize_engine")
Example #10
    def add_trt_config_loader(self, script, data_loader_name):
        profiles = []
        profile_args = tools_util.parse_profile_shapes(
            self.model_args.input_shapes, self.trt_min_shapes,
            self.trt_opt_shapes, self.trt_max_shapes)
        for (min_shape, opt_shape, max_shape) in profile_args:
            profile_str = "Profile()"
            for name in min_shape.keys():
                profile_str += Script.format_str(
                    ".add({:}, min={:}, opt={:}, max={:})", name,
                    min_shape[name], opt_shape[name], max_shape[name])
            profiles.append(Inline(profile_str))
        if profiles:
            script.add_import(imports=["Profile"],
                              frm="polygraphy.backend.trt")
            sep = Inline("\n{:}".format(constants.TAB))
            profiles = Script.format_str(
                "[{:}{:}\n]", sep, Inline((",{:}".format(sep)).join(profiles)))
            profile_name = script.add_loader(profiles, "profiles")
        else:
            profile_name = None

        calibrator = None
        if self.int8:
            script.add_import(imports=["Calibrator"],
                              frm="polygraphy.backend.trt")
            script.add_import(imports=["DataLoader"],
                              frm="polygraphy.comparator")
            calibrator = Script.invoke(
                "Calibrator",
                data_loader=Inline(data_loader_name)
                if data_loader_name else Inline("DataLoader()"),
                cache=self.calibration_cache)

        config_loader_str = Script.invoke_if_nondefault(
            "CreateTrtConfig",
            max_workspace_size=self.workspace,
            tf32=self.tf32,
            fp16=self.fp16,
            int8=self.int8,
            strict_types=self.strict_types,
            profiles=profile_name,
            calibrator=Inline(calibrator) if calibrator else None)
        if config_loader_str is not None:
            script.add_import(imports=["CreateConfig as CreateTrtConfig"],
                              frm="polygraphy.backend.trt")
            config_loader_name = script.add_loader(config_loader_str,
                                                   "create_trt_config")
        else:
            config_loader_name = None
        return config_loader_name
Example #11
def add_tf_loader(script, args, disable_outputs=None, suffix=None):
    if disable_outputs:
        outputs = None
    else:
        outputs = _get_outputs_arg(script, args, "tf_outputs")

    model_file = args_util.get(args, "model_file")
    model_type = args_util.get(args, "model_type")

    save_pb = args_util.get(args, "save_pb")
    save_tensorboard = args_util.get(args, "save_tensorboard")

    if model_type == "ckpt":
        G_LOGGER.verbose("Loading a TensorFlow checkpoint. Please ensure you are not using the --use-subprocess flag".format(model_file), mode=LogMode.ONCE)
        script.add_import(imports=["GraphFromCkpt"], frm="polygraphy.backend.tf")
        loader_id = "load_ckpt"
        loader_str = Script.invoke("GraphFromCkpt", model_file, args_util.get(args, "ckpt"))
    elif model_type == "keras":
        script.add_import(imports=["GraphFromKeras"], frm="polygraphy.backend.tf")
        loader_id = "load_keras"
        loader_str = Script.invoke("GraphFromKeras", model_file)
    else:
        script.add_import(imports=["GraphFromFrozen"], frm="polygraphy.backend.tf")
        G_LOGGER.verbose("Attempting to load as a frozen graph. If this is not correct, please specify --model-type", mode=LogMode.ONCE)
        loader_id = "load_frozen"
        loader_str = Script.invoke("GraphFromFrozen", model_file)

    loader_name = script.add_loader(loader_str, loader_id, suffix=suffix)

    if args_util.get(args, "freeze_graph"):
        script.add_import(imports=["OptimizeGraph"], frm="polygraphy.backend.tf")
        loader_name = script.add_loader(Script.invoke("OptimizeGraph", loader_name), "optimize_graph", suffix=suffix)
    if args_util.get(args, "tftrt"):
        script.add_import(imports=["UseTfTrt"], frm="polygraphy.backend.tf")
        loader_str = Script.invoke("UseTfTrt", loader_name, max_workspace_size=args_util.get(args, "workspace"), fp16=args_util.get(args, "fp16"), int8=args_util.get(args, "int8"),
                                max_batch_size=args_util.get(args, "batch_size"), is_dynamic_op=args_util.get(args, "dynamic_op"), minimum_segment_size=args_util.get(args, "minimum_segment_size"))
        loader_name = script.add_loader(loader_str, "use_tftrt", suffix=suffix)

    MODIFY_TF = "ModifyGraph"
    modify_tf_str = Script.invoke(MODIFY_TF, loader_name, outputs=outputs)
    if modify_tf_str != Script.invoke(MODIFY_TF, loader_name):
        script.add_import(imports=[MODIFY_TF], frm="polygraphy.backend.tf")
        loader_name = script.add_loader(modify_tf_str, "modify_tf")

    engine_dir = None
    if args_util.get(args, "tftrt"):
        engine_dir = args_util.get(args, "save_engine")

    WRITE_TF = "SaveGraph"
    write_tf_str = Script.invoke(WRITE_TF, loader_name, path=save_pb, tensorboard_dir=save_tensorboard, engine_dir=engine_dir)
    if write_tf_str != Script.invoke(WRITE_TF, loader_name):
        script.add_import(imports=[WRITE_TF], frm="polygraphy.backend.tf")
        loader_name = script.add_loader(write_tf_str, "save_tf")

    return loader_name
Example #12
def add_serialized_onnx_loader(script, args, disable_outputs=None):
    model_file = args_util.get(args, "model_file")

    needs_modify = get_modify_onnx_str(script, args, "check_needs_modify", disable_outputs) is not None
    should_import_raw = args_util.get(args, "model_type") == "onnx" and not needs_modify

    if should_import_raw:
        script.add_import(imports=["BytesFromPath"], frm="polygraphy.backend.common")
        onnx_loader = script.add_loader(Script.invoke("BytesFromPath", model_file), "load_serialized_onnx")
    else:
        script.add_import(imports=["BytesFromOnnx"], frm="polygraphy.backend.onnx")
        onnx_loader = add_onnx_loader(script, args, disable_outputs=disable_outputs)
        onnx_loader = script.add_loader(Script.invoke("BytesFromOnnx", onnx_loader), "serialize_onnx")
    return onnx_loader
Example #13
    def add_to_script(self, script):
        script.add_import(imports=["OnnxrtRunner"],
                          frm="polygraphy.backend.onnxrt")
        onnx_name = self.onnx_loader_args.add_serialized_onnx_loader(script)

        script.add_import(imports=["SessionFromOnnxBytes"],
                          frm="polygraphy.backend.onnxrt")
        loader_name = script.add_loader(
            Script.invoke("SessionFromOnnxBytes", onnx_name),
            "build_onnxrt_session")

        runner_name = script.add_loader(
            Script.invoke("OnnxrtRunner", loader_name), "onnxrt_runner")
        script.add_runner(runner_name)
        return runner_name
Example #14
def add_onnxtf_runner(script, args):
    script.add_import(imports=["OnnxTfRunner", "OnnxFromPath"],
                      frm="polygraphy.backend.onnx")
    script.add_runner(
        Script.invoke(
            "OnnxTfRunner",
            tool_util.add_onnx_loader(script, args, suffix="_onnxtf")))
Example #15
def _wrap_if_plugins(script, args, obj_name):
    plugins = args_util.get(args, "plugins")
    if plugins:
        script.add_import(imports=["LoadPlugins"], frm="polygraphy.backend.trt")
        loader_str = Script.invoke("LoadPlugins", obj_name, plugins=plugins)
        obj_name = script.add_loader(loader_str, "load_plugins")
    return obj_name
Example #16
def add_tf_runner(script, args):
    script.add_import(imports=["TfRunner"], frm="polygraphy.backend.tf")

    graph_name = tool_util.add_tf_loader(script, args)
    config_name = tool_util.add_tf_config_loader(script, args)

    script.add_import(imports=["SessionFromGraph"],
                      frm="polygraphy.backend.tf")
    loader_name = script.add_loader(
        Script.invoke("SessionFromGraph", graph_name, config=config_name),
        "build_tf_session")

    runner_str = Script.invoke("TfRunner",
                               loader_name,
                               timeline_path=args.save_timeline)
    script.add_runner(runner_str)
Example #17
def build_script(args, cmd_run=None):
    script = Script(summary=generate_summary(args.model_file, args.runners,
                                             args.load_results))
    tool_util.add_logger_settings(script, args)

    data_loader_name = tool_util.add_data_loader(script, args)

    for runner_arg in args.runners:
        add_runner_func = {
            "tf": add_tf_runner,
            "onnxrt": add_onnxrt_runner,
            "onnxtf": add_onnxtf_runner,
            "cntk": add_cntk_runner,
            "trt": lambda script, args: add_trt_runner(script, args, data_loader_name),
            "trt_legacy": add_trt_legacy_runner,
        }[runner_arg]
        add_runner_func(script, args)

    add_comparator(script,
                   args,
                   data_loader_name=data_loader_name,
                   cmd_run=cmd_run)
    return str(script)
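
build_script stitches the pieces above into one runnable comparison script: logger settings and the data loader go into the prefix, each selected runner contributes its loader chain, and the comparator calls go into the suffix. Below is a rough, hand-written sketch of what the assembled output can look like when comparing ONNX Runtime against TensorRT (variable names mirror the add_loader() IDs used above; the real output depends on the flags passed):

# Hypothetical assembled script; "model.onnx" is a placeholder path.
from polygraphy.backend.common import BytesFromPath
from polygraphy.backend.onnxrt import OnnxrtRunner, SessionFromOnnxBytes
from polygraphy.backend.trt import EngineFromNetwork, NetworkFromOnnxBytes, TrtRunner
from polygraphy.comparator import Comparator

load_serialized_onnx = BytesFromPath("model.onnx")
build_onnxrt_session = SessionFromOnnxBytes(load_serialized_onnx)
parse_network_from_onnx = NetworkFromOnnxBytes(load_serialized_onnx)
build_engine = EngineFromNetwork(parse_network_from_onnx)

runners = [OnnxrtRunner(build_onnxrt_session), TrtRunner(build_engine)]
results = Comparator.run(runners)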
Example #18
    def get_data_loader(self):
        script = Script()
        data_loader_name = self.add_to_script(script)
        if data_loader_name is None:  # All arguments are default
            from polygraphy.comparator import DataLoader
            return DataLoader()
        exec(str(script), globals(), locals())
        return locals()[data_loader_name]
Example #19
    def add_to_script(self, script):
        script.add_import(imports=["TfRunner"], frm="polygraphy.backend.tf")

        graph_name = self.tf_loader_args.add_to_script(script)
        config_name = self.tf_config_args.add_to_script(script)

        script.add_import(imports=["SessionFromGraph"],
                          frm="polygraphy.backend.tf")
        loader_name = script.add_loader(
            Script.invoke("SessionFromGraph", graph_name, config=config_name),
            "build_tf_session")

        runner_name = script.add_loader(
            Script.invoke("TfRunner",
                          loader_name,
                          timeline_path=self.timeline_path), "tf_runner")
        script.add_runner(runner_name)
        return runner_name
Example #20
    def _wrap_if_plugins(self, script, obj_name):
        if self.plugins:
            script.add_import(imports=["LoadPlugins"],
                              frm="polygraphy.backend.trt")
            loader_str = Script.invoke("LoadPlugins",
                                       obj_name,
                                       plugins=self.plugins)
            obj_name = script.add_loader(loader_str, "load_plugins")
        return obj_name
Example #21
def add_tf_config_loader(script, args):
    config_loader_str = Script.invoke_if_nondefault("CreateConfig", gpu_memory_fraction=args_util.get(args, "gpu_memory_fraction"),
                               allow_growth=args_util.get(args, "allow_growth"), use_xla=args_util.get(args, "xla"))
    if config_loader_str is not None:
        script.add_import(imports=["CreateConfig"], frm="polygraphy.backend.tf")
        config_loader_name = script.add_loader(config_loader_str, "create_tf_config")
    else:
        config_loader_name = None
    return config_loader_name
Example #22
    def add_to_script(self, script):
        config_loader_str = Script.invoke_if_nondefault("CreateConfig", gpu_memory_fraction=self.gpu_memory_fraction,
                                                        allow_growth=self.allow_growth, use_xla=self.xla)
        if config_loader_str is not None:
            script.add_import(imports=["CreateConfig"], frm="polygraphy.backend.tf")
            config_loader_name = script.add_loader(config_loader_str, "create_tf_config")
        else:
            config_loader_name = None
        return config_loader_name
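
Both the free-function and class-based TF config makers rely on Script.invoke_if_nondefault, which returns None when every keyword argument is at its default, so neither the CreateConfig import nor the loader line is added unless a session option was actually set. When one is set, the generated fragment is roughly as follows (values are illustrative):

# Hypothetical generated fragment when a GPU memory fraction and allow-growth are requested.
from polygraphy.backend.tf import CreateConfig

create_tf_config = CreateConfig(gpu_memory_fraction=0.5, allow_growth=True)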
Example #23
def get_modify_onnx_str(script, args, loader_name, disable_outputs=None):
    if disable_outputs:
        outputs = None
        exclude_outputs = None
    else:
        outputs = _get_outputs_arg(script, args, "onnx_outputs")
        exclude_outputs = args_util.get(args, "onnx_exclude_outputs")

    if hasattr(args, "shape_inference"):
        do_shape_inference = args_util.get(args, "shape_inference")
    else:
        do_shape_inference = None if args_util.get(args, "no_shape_inference") else True

    MODIFY_ONNX = "ModifyOnnx"
    modify_onnx_str = Script.invoke(MODIFY_ONNX, loader_name, do_shape_inference=do_shape_inference,
                                    outputs=outputs, exclude_outputs=exclude_outputs)
    if modify_onnx_str != Script.invoke(MODIFY_ONNX, loader_name):
        script.add_import(imports=[MODIFY_ONNX], frm="polygraphy.backend.onnx")
        return modify_onnx_str
    return None
Example #24
    def _get_modify_onnx_str(self, script, loader_name, disable_outputs=None):
        if disable_outputs:
            outputs = None
            exclude_outputs = None
        else:
            outputs = tools_util.get_outputs_for_script(script, self.outputs)
            exclude_outputs = self.exclude_outputs

        MODIFY_ONNX = "ModifyOnnx"
        modify_onnx_str = Script.invoke(
            MODIFY_ONNX,
            loader_name,
            do_shape_inference=self.do_shape_inference,
            outputs=outputs,
            exclude_outputs=exclude_outputs)
        if modify_onnx_str != Script.invoke(MODIFY_ONNX, loader_name):
            script.add_import(imports=[MODIFY_ONNX],
                              frm="polygraphy.backend.onnx")
            return modify_onnx_str
        return None
Example #25
def add_trt_network_loader(script, args):
    model_file = args_util.get(args, "model_file")
    outputs = _get_outputs_arg(script, args, "trt_outputs")

    if args_util.get(args, "network_api"):
        CREATE_NETWORK_FUNC = Inline("create_network")

        script.add_import(imports=["CreateNetwork"], frm="polygraphy.backend.trt")
        script.add_import(imports=["extend"], frm="polygraphy.common.func")

        script.append_prefix("# Manual TensorRT network creation")
        script.append_prefix("@extend(CreateNetwork())")
        script.append_prefix("def {:}(builder, network):".format(CREATE_NETWORK_FUNC))
        script.append_prefix("{tab}import tensorrt as trt\n".format(tab=constants.TAB))
        script.append_prefix("{tab}# Define your network here. Make sure to mark outputs!".format(tab=constants.TAB))
        net_inputs = args_util.get(args, "inputs")
        if net_inputs:
            for name, (dtype, shape) in net_inputs.items():
                script.append_prefix("{tab}{name} = network.add_input(name='{name}', shape={shape}, dtype=trt.float32) # TODO: Set dtype".format(
                                        name=name, shape=shape, tab=constants.TAB))
        script.append_prefix("{tab}# TODO: network.mark_output(...)\n".format(tab=constants.TAB))
        return CREATE_NETWORK_FUNC


    if args_util.get(args, "ext"):
        script.add_import(imports=["NetworkFromOnnxPath"], frm="polygraphy.backend.trt")
        loader_str = Script.invoke("NetworkFromOnnxPath", _wrap_if_plugins(script, args, model_file), explicit_precision=args_util.get(args, "explicit_precision"))
        loader_name = script.add_loader(loader_str, "parse_network_from_onnx")
    else:
        script.add_import(imports=["NetworkFromOnnxBytes"], frm="polygraphy.backend.trt")
        onnx_loader = add_serialized_onnx_loader(script, args, disable_outputs=True)
        loader_str = Script.invoke("NetworkFromOnnxBytes", _wrap_if_plugins(script, args, onnx_loader), explicit_precision=args_util.get(args, "explicit_precision"))
        loader_name = script.add_loader(loader_str, "parse_network_from_onnx")

    MODIFY_NETWORK = "ModifyNetwork"
    modify_network_str = Script.invoke(MODIFY_NETWORK, loader_name, outputs=outputs, exclude_outputs=args_util.get(args, "trt_exclude_outputs"))
    if modify_network_str != Script.invoke(MODIFY_NETWORK, loader_name):
        script.add_import(imports=[MODIFY_NETWORK], frm="polygraphy.backend.trt")
        loader_name = script.add_loader(modify_network_str, "modify_network")

    return loader_name
Example #26
    def add_to_script(self, script):
        script.add_import(imports=["TrtLegacyRunner"], frm="polygraphy.backend.trt_legacy")
        G_LOGGER.warning("Legacy TensorRT runner only supports implicit batch TensorFlow/UFF, ONNX, and Caffe models")

        if self.model_args.model_type == "onnx":
            script.add_import(imports=["ParseNetworkFromOnnxLegacy"], frm="polygraphy.backend.trt_legacy")
            onnx_loader = self.onnx_loader_args.add_onnx_loader(script, disable_outputs=True)
            loader_name = script.add_loader(Script.format_str("ParseNetworkFromOnnxLegacy({:})", onnx_loader), "parse_network_from_onnx_legacy")
        elif self.model_args.model_type == "caffe":
            script.add_import(imports=["LoadNetworkFromCaffe"], frm="polygraphy.backend.trt_legacy")
            loader_name = script.add_loader(Script.format_str("LoadNetworkFromCaffe({:}, {:}, {:}, {:})", self.model_args.model_file, self.caffe_model,
                                                                self.trt_outputs, self.batch_size), "parse_network_from_caffe")
        else:
            script.add_import(imports=["LoadNetworkFromUff"], frm="polygraphy.backend.trt_legacy")
            if self.model_args.model_type == "uff":
                script.add_import(imports=["LoadUffFile"], frm="polygraphy.backend.trt_legacy")
                shapes = {name: shape for name, (_, shape) in self.trt_loader_args.input_shapes.items()}
                loader_name = script.add_loader(Script.format_str("LoadUffFile({:}, {:}, {:})", self.model_args.model_file, misc.default_value(shapes, {}), self.trt_outputs), "load_uff_file")
            else:
                script.add_import(imports=["ConvertToUff"], frm="polygraphy.backend.trt_legacy")
                loader_name = script.add_loader(Script.format_str("ConvertToUff({:}, save_uff={:}, preprocessor={:})", self.tf_loader_args.add_to_script(script), self.save_uff, self.preprocessor), "convert_to_uff")
            loader_name = script.add_loader(Script.format_str("LoadNetworkFromUff({:}, uff_order={:})", loader_name, self.uff_order), "uff_network_loader")


        runner_str = Script.format_str("TrtLegacyRunner({:}, {:}, {:}, fp16={:}, tf32={:}, load_engine={:}, save_engine={:}, layerwise={:}, plugins={:})",
                                        loader_name, self.trt_loader_args.workspace, self.batch_size, self.trt_loader_args.fp16, self.trt_loader_args.tf32,
                                        self.model_args.model_file if self.model_args.model_type == "engine" else None,
                                        self.trt_runner_args.save_engine, self.trt_outputs==constants.MARK_ALL, self.trt_loader_args.plugins)


        runner_name = script.add_loader(runner_str, "trt_legacy_runner")
        script.add_runner(runner_name)
        return runner_name
Example #27
def add_data_loader(script, args):
    def omit_none_tuple(tup):
        if all([elem is None for elem in tup]):
            return None
        return tup

    int_range = omit_none_tuple(tup=(args_util.get(args, "int_min"), args_util.get(args, "int_max")))
    float_range = omit_none_tuple(tup=(args_util.get(args, "float_min"), args_util.get(args, "float_max")))

    input_metadata_str = Inline(repr(args_util.get(args, "inputs"))) if args_util.get(args, "inputs") else None
    if input_metadata_str:
        script.add_import(imports=["TensorMetadata"], frm="polygraphy.common")

    data_loader = Script.invoke_if_nondefault("DataLoader", seed=args_util.get(args, "seed"), iterations=args_util.get(args, "iterations"),
                                              input_metadata=input_metadata_str, int_range=int_range, float_range=float_range)
    if data_loader is not None:
        data_loader_name = Inline("data_loader")
        script.add_import(imports=["DataLoader"], frm="polygraphy.comparator")
        script.append_prefix(Script.format_str("\n# Inference Inputs Loader\n{:} = {:}\n", data_loader_name, Inline(data_loader)))
    else:
        data_loader_name = None
    return data_loader_name
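
add_data_loader follows the same "only if non-default" rule: unless a seed, iteration count, value range, or input metadata was supplied, no DataLoader line (and no name) is emitted and the comparator falls back to its built-in default. When options are given, the script prefix gains a fragment along these lines (values are illustrative):

# Hypothetical generated fragment for a fixed seed, two iterations, and an integer range.
from polygraphy.comparator import DataLoader

# Inference Inputs Loader
data_loader = DataLoader(seed=1, iterations=2, int_range=(1, 25))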
Example #28
    def add_to_script(self, script, suffix=None):
        G_LOGGER.verbose(
            "Attempting to load as a TensorFlow model, using TF2ONNX to convert to ONNX. "
            "If this is not correct, please specify --model-type",
            mode=LogMode.ONCE)
        script.add_import(imports=["OnnxFromTfGraph"],
                          frm="polygraphy.backend.onnx")
        loader_str = Script.invoke("OnnxFromTfGraph",
                                   self.tf_loader_args.add_to_script(
                                       script,
                                       disable_outputs=True,
                                       suffix=suffix),
                                   opset=self.opset,
                                   fold_constant=self.fold_constant)
        loader_name = script.add_loader(loader_str,
                                        "export_onnx_from_tf",
                                        suffix=suffix)
        return loader_name
Example #29
    def add_to_script(self, script):
        def _make_data_loader(script):
            data_loader_name = Inline("data_loader")

            input_metadata_str = (Inline(repr(self.model_args.input_shapes))
                                  if self.model_args.input_shapes else None)
            if input_metadata_str:
                script.add_import(imports=["TensorMetadata"],
                                  frm="polygraphy.common")

            data_loader = Script.invoke_if_nondefault(
                "DataLoader",
                seed=self.seed,
                iterations=self.iterations,
                input_metadata=input_metadata_str,
                int_range=self.int_range,
                float_range=self.float_range)
            if data_loader is not None:
                script.add_import(imports=["DataLoader"],
                                  frm="polygraphy.comparator")
                script.append_prefix(
                    Script.format_str("\n# Inference Inputs Loader\n{:} = {:}",
                                      data_loader_name, Inline(data_loader)))
            else:
                data_loader_name = None
            return data_loader_name

        if self.load_inputs:
            script.add_import(imports=["misc"], frm="polygraphy.util")

            data_loader_name = Inline("data_loader")
            script.append_prefix(
                Script.format_str(
                    "# Load inputs\n{data_loader} = []\nfor input_data_path in {load_inputs}:"
                    "\n{tab}{data_loader}.extend(misc.pickle_load(input_data_path))",
                    data_loader=data_loader_name,
                    load_inputs=self.load_inputs,
                    tab=Inline(constants.TAB)))
        else:
            data_loader_name = _make_data_loader(script)
        script.append_prefix("")  # Newline
        return data_loader_name
Example #30
    def build_script(self, args):
        script = Script(
            summary=generate_summary(self.makers[ModelArgs].model_file,
                                     args.runners, args.load_results))

        self.makers[LoggerArgs].add_to_script(script)

        data_loader_name = self.makers[DataLoaderArgs].add_to_script(script)

        for runner_arg in args.runners:
            add_runner_func = {
                "tf": self.makers[TfRunnerArgs].add_to_script,
                "onnxrt": self.makers[OnnxrtRunnerArgs].add_to_script,
                "onnxtf": self.makers[OnnxtfRunnerArgs].add_to_script,
                "trt": lambda script: self.makers[TrtRunnerArgs].add_to_script(script, data_loader_name),
                "trt_legacy": self.makers[TrtLegacyArgs].add_to_script,
            }[runner_arg]
            add_runner_func(script)

        RESULTS_VAR_NAME = self.makers[ComparatorRunArgs].add_to_script(
            script, data_loader_name=data_loader_name)
        SUCCESS_VAR_NAME = self.makers[ComparatorCompareArgs].add_to_script(
            script, results_name=RESULTS_VAR_NAME)

        cmd_run = Inline("' '.join(sys.argv)")
        script.append_suffix(
            Script.format_str(
                '# Report Results\ncmd_run={cmd}\nif {success}:\n{tab}G_LOGGER.finish("PASSED | Command: {{}}".format(cmd_run))\nelse:\n{tab}G_LOGGER.error("FAILED | Command: {{}}".format(cmd_run))',
                cmd=cmd_run,
                success=SUCCESS_VAR_NAME,
                tab=Inline(constants.TAB)))
        script.append_suffix(
            "sys.exit(0 if {success} else 1)".format(success=SUCCESS_VAR_NAME))

        return str(script)