Exemplo n.º 1
0
 def test_parse_shape_with_dim_param_quoted(self, name, quote):
     """A quoted dimension name (e.g. 'batch') parses to a string dimension."""
     spec = f"{name}:[{quote}batch{quote},3,224,224]"
     meta = args_util.parse_meta([spec], includes_dtype=False)
     assert meta[name].shape == ("batch", 3, 224, 224)
Exemplo n.º 2
0
    def parse(self, args):
        """Parse model-related command-line arguments into instance attributes.

        Populates ``self.input_shapes`` (a TensorMetadata), ``self.model_file``
        (made absolute if set), and ``self.model_type``. Exits via
        ``G_LOGGER.exit`` when the model type cannot be determined or a
        TensorRT network script path is invalid.
        """
        def determine_model_type():
            # An explicit --model-type always wins over inference.
            if args_util.get(args, "model_type") is not None:
                return args.model_type.lower()

            # Without a model file there is nothing to infer from.
            if args_util.get(args, "model_file") is None:
                return None

            def use_ext(ext_mapping):
                # Map the model file's extension to a model type, if known;
                # returns None (implicitly) for unrecognized extensions.
                file_ext = os.path.splitext(args.model_file)[-1]
                if file_ext in ext_mapping:
                    return ext_mapping[file_ext]

            runners = util.default(args_util.get(args, "runners"), [])
            # TF checkpoints are directories (or explicitly flagged via --ckpt).
            if args_util.get(args, "ckpt") or os.path.isdir(args.model_file):
                return "ckpt"
            elif "tf" in runners or "trt_legacy" in runners:
                if args.caffe_model:
                    return "caffe"
                # For TF-based runners, fall back to a frozen graph when the
                # extension is unrecognized.
                return use_ext(ModelArgs.EXT_MODEL_TYPE_MAPPING) or "frozen"
            else:
                model_type = use_ext(ModelArgs.EXT_MODEL_TYPE_MAPPING)
                if model_type:
                    return model_type

            # Nothing matched: abort with guidance for the user.
            G_LOGGER.exit(
                "Could not automatically determine model type for: {:}\n"
                "Please explicitly specify the type with the --model-type option"
                .format(args.model_file))

        if args_util.get(args, "input_shapes"):
            self.input_shapes = args_util.parse_meta(
                args_util.get(args, "input_shapes"),
                includes_dtype=False)  # TensorMetadata
        else:
            self.input_shapes = TensorMetadata()

        self.model_file = args_util.get(args, "model_file")

        if self.model_file:
            G_LOGGER.verbose("Model: {:}".format(self.model_file))
            # Warn (but do not abort) on a missing path; some tools may
            # create the model later.
            if not os.path.exists(self.model_file):
                G_LOGGER.warning("Model path does not exist: {:}".format(
                    self.model_file))
            self.model_file = os.path.abspath(self.model_file)

        # A model type set earlier (self._model_type) takes precedence over
        # automatic detection.
        model_type_str = util.default(self._model_type, determine_model_type())
        self.model_type = ModelArgs.ModelType(
            model_type_str) if model_type_str else None

        # NOTE(review): comparing self.model_type to a plain string presumably
        # works because ModelArgs.ModelType subclasses str -- confirm.
        if self.model_type == "trt-network-script" and (
                not self.model_file or not self.model_file.endswith(".py")):
            G_LOGGER.exit(
                "TensorRT network scripts must exist and have '.py' extensions. "
                "Note: Provided network script path was: {:}".format(
                    self.model_file))
Exemplo n.º 3
0
    def get_shapes(lst, idx):
        """Return a ``{name: static_shape}`` dict for profile entry ``idx``.

        Overlays the shapes parsed from ``lst[idx]`` (when present) onto the
        enclosing scope's ``default_shapes``, then replaces any dynamic
        dimensions with concrete values.
        """
        nonlocal default_shapes
        # Rebind the enclosing variable to a shallow copy before updating:
        # the original mapping object is never mutated, but updates still
        # carry over to subsequent calls via the nonlocal rebinding.
        default_shapes = copy.copy(default_shapes)
        if idx < len(lst):
            default_shapes.update(
                args_util.parse_meta(lst[idx], includes_dtype=False))

        # Don't care about dtype, and need to override dynamic dimensions
        shapes = {
            name: util.override_dynamic_shape(shape)
            for name, (_, shape) in default_shapes.items()
        }

        # Warn (once per message, via LogMode.ONCE) whenever a dynamic shape
        # was forced to a static one.
        for name, shape in shapes.items():
            if tuple(default_shapes[name].shape) != tuple(shape):
                G_LOGGER.warning(
                    "Input tensor: {:} | For TensorRT profile, overriding dynamic shape: {:} to: {:}"
                    .format(name, default_shapes[name].shape, shape),
                    mode=LogMode.ONCE,
                )

        return shapes
Exemplo n.º 4
0
 def test_parse_shape_dtype_auto(self, name):
     """Using 'auto' for both shape and dtype should yield None for each."""
     meta = args_util.parse_meta([f"{name}:auto:auto"])
     assert meta[name].shape is None
     assert meta[name].dtype is None
Exemplo n.º 5
0
 def test_parse_shape_dtype(self, name):
     """A full '<name>:<shape>:<dtype>' spec parses both shape and dtype."""
     spec = f"{name}:[1,3,224,224]:float32"
     meta = args_util.parse_meta([spec])
     assert meta[name].shape == (1, 3, 224, 224)
     assert meta[name].dtype == np.float32
Exemplo n.º 6
0
 def test_parse_dtype_only(self, name):
     """With includes_shape=False, only the dtype is parsed; shape stays None."""
     meta = args_util.parse_meta([f"{name}:float32"], includes_shape=False)
     assert meta[name].shape is None
     assert meta[name].dtype == np.float32
Exemplo n.º 7
0
 def test_parse_shape_single_dim(self, name):
     """A one-element shape list parses to a 1-tuple."""
     meta = args_util.parse_meta([f"{name}:[1]"], includes_dtype=False)
     assert meta[name].shape == (1,)
Exemplo n.º 8
0
 def test_parse_shape_scalar(self, name):
     """An empty shape list ('[]') parses to an empty tuple (a scalar)."""
     meta = args_util.parse_meta([f"{name}:[]"], includes_dtype=False)
     assert meta[name].shape == tuple()
Exemplo n.º 9
0
 def test_parse_empty_shape(self, name):
     """Zero-sized dimensions are preserved verbatim in the parsed shape."""
     meta = args_util.parse_meta([f"{name}:[0,3,0,224]"], includes_dtype=False)
     assert meta[name].shape == (0, 3, 0, 224)
     assert meta[name].dtype is None
Exemplo n.º 10
0
 def test_parse_shape_only(self, name):
     """With includes_dtype=False, only the shape is parsed; dtype stays None."""
     meta = args_util.parse_meta([f"{name}:[1,3,224,224]"], includes_dtype=False)
     assert meta[name].shape == (1, 3, 224, 224)
     assert meta[name].dtype is None
Exemplo n.º 11
0
 def test_parse_legacy(self, name):
     """The legacy comma + 'x'-separated shape format still parses correctly."""
     meta = args_util.parse_meta([f"{name},1x3x224x224"], includes_dtype=False)
     assert meta[name].shape == (1, 3, 224, 224)
     assert meta[name].dtype is None
Exemplo n.º 12
0
    def run_impl(self, args):
        """Extract a subgraph from an ONNX model and save it.

        Builds I/O metadata from user-supplied ``args.input_meta`` /
        ``args.output_meta``, fills in anything missing from the model
        itself, runs ONNX shape inference when needed, and falls back to
        runtime ("layerwise") inference as a last resort before extracting
        and saving the subgraph.
        """
        def missing_meta_tensors(input_metadata, output_metadata):
            # Collect tensors still lacking required metadata:
            # inputs need both dtype and shape; outputs only need dtype.
            missing = TensorMetadata()
            for name, (dtype, shape) in input_metadata.items():
                if dtype is None or shape is None:
                    missing.add(name, dtype, shape)
            for name, (dtype, shape) in output_metadata.items():
                if dtype is None:
                    missing.add(name, dtype, shape)
            return missing

        model = super().load_model()

        # Output shapes are irrelevant for extraction, so they are not parsed.
        user_input_metadata = args_util.parse_meta(args.input_meta)
        user_output_metadata = args_util.parse_meta(args.output_meta,
                                                    includes_shape=False)

        # Loads an ONNX-GS graph and create new I/O metadata w/ info missing in user_input/output_metadata.
        def load_graph_and_io_meta(model):
            graph = gs.import_onnx(model)
            TENSOR_MAP = graph.tensors()

            def get_tensor(name):
                # Abort with a clear message if the user named a tensor that
                # does not exist in the model.
                if name not in TENSOR_MAP:
                    G_LOGGER.exit(
                        "Tensor: {:} does not exist in the model.".format(
                            name))
                return TENSOR_MAP[name]

            # Makes a TensorMetadata for inputs/outputs using either the user provided information
            # or details derived from tensors.
            def make_io_meta(user_meta, tensors):
                if not user_meta:
                    return tools_util.meta_from_gs_tensors(tensors)

                new_meta = copy.copy(user_meta)
                for name, (dtype, shape) in new_meta.items():
                    tensor = get_tensor(name)
                    # User-specified values win; model values fill the gaps.
                    new_meta.add(name, dtype or tensor.dtype, shape
                                 or tensor.shape)
                return new_meta

            input_metadata = make_io_meta(user_input_metadata, graph.inputs)
            output_metadata = make_io_meta(user_output_metadata, graph.outputs)
            return graph, input_metadata, output_metadata

        graph, input_metadata, output_metadata = load_graph_and_io_meta(model)

        # If we've already done ONNX shape inference, we should not do it again here.
        skip_shape_inference = self.arg_groups[
            OnnxShapeInferenceArgs].force_fallback or self.arg_groups[
                OnnxShapeInferenceArgs].do_shape_inference
        if missing_meta_tensors(input_metadata,
                                output_metadata) and not skip_shape_inference:
            G_LOGGER.info(
                "Running ONNX shape inference to derive shapes and/or data types for `auto` arguments.\n"
                "To avoid this, you can specify the shapes and data types explicitly."
            )
            model = onnx_backend.infer_shapes(model)
            # Re-derive metadata from the shape-inferred model.
            graph, input_metadata, output_metadata = load_graph_and_io_meta(
                model)

        missing_tensors = missing_meta_tensors(input_metadata, output_metadata)
        if missing_tensors or self.arg_groups[
                OnnxShapeInferenceArgs].force_fallback:
            # Use ONNX runtime with static shapes to infer shapes when all else fails
            # Returns a TensorMetadata for all tensors in the graph.
            if not self.arg_groups[OnnxShapeInferenceArgs].force_fallback:
                G_LOGGER.warning(
                    "Some tensor shapes or dtypes are missing in the model. Note: Tensors with missing information:\n{:}\n"
                    "Will run inference to determine shapes. This may cause some dynamic "
                    "dimensions to become static.\n"
                    "To avoid this, please provide metadata on the command-line. "
                    .format(missing_tensors))
            else:
                G_LOGGER.info(
                    "Forcing fallback shape inference. This will cause dynamic dimensions to become static."
                )

            _, layerwise_meta = self.arg_groups[
                OnnxShapeInferenceArgs].fallback_inference(model)

            def update_meta_from_layerwise(meta, user_meta, set_shapes=True):
                # Merge fallback-inference results into ``meta``, honoring
                # user-specified values above all.
                for name in meta:
                    user_dtype, user_shape = None, None
                    if name in user_meta:
                        user_dtype, user_shape = user_meta[
                            name].dtype, user_meta[name].shape

                    # Choose between what the user set, what's in the model, and what
                    # fallback shape inference said.
                    def choose_meta(user, model, fallback):
                        # Under force_fallback, model-derived values are
                        # deliberately ignored.
                        if self.arg_groups[
                                OnnxShapeInferenceArgs].force_fallback:
                            return user or fallback
                        return user or model or fallback

                    if name in layerwise_meta:
                        meta[name].dtype = choose_meta(
                            user_dtype, meta[name].dtype,
                            layerwise_meta[name].dtype)
                        if set_shapes:
                            meta[name].shape = choose_meta(
                                user_shape, meta[name].shape,
                                layerwise_meta[name].shape)
                        G_LOGGER.verbose(
                            "Updated tensor: {:} metadata to: {:}".format(
                                name, meta[name]))
                return meta

            input_metadata = update_meta_from_layerwise(
                input_metadata, user_input_metadata)
            # Output shapes are only overridden when fallback is forced.
            output_metadata = update_meta_from_layerwise(
                output_metadata,
                user_output_metadata,
                set_shapes=self.arg_groups[OnnxShapeInferenceArgs].
                force_fallback)

        graph = onnx_backend.extract_subgraph(graph, input_metadata,
                                              output_metadata)
        super().save_model(super().export_graph(graph))