Пример #1
0
    def fill_defaults(self, network, default_shape_value=None):
        """
        Fill this profile with sane default values for any bindings whose
        shapes have not been set explicitly.

        Args:
            network (trt.INetworkDefinition):
                    The TensorRT network this profile is meant for.
                    This will be used to determine model inputs and their shapes.
            default_shape_value (int):
                    The value to use to override dynamic dimensions.

        Returns:
            Profile: Self
        """
        default_shape_value = util.default(default_shape_value,
                                           constants.DEFAULT_SHAPE_VALUE)

        for idx in range(network.num_inputs):
            inp = network.get_input(idx)

            # Skip inputs whose min/opt/max were already set explicitly on this profile.
            if inp.name in self:
                continue

            with G_LOGGER.verbosity(
                    G_LOGGER.CRITICAL):  # WAR for spam from TRT
                is_shape_tensor = inp.is_shape_tensor
            if is_shape_tensor:
                # For a shape tensor, the *values* (not the shape) define the profile.
                # Its own shape is 1-D, so shape[0] is the number of values needed.
                rank = inp.shape[0]
                shape = (default_shape_value, ) * rank
                # FIX: the format string has exactly two placeholders; the original
                # code also passed `rank` as a third (silently ignored) argument.
                G_LOGGER.warning(
                    "{:} | No values provided; Will use input values: {:} for min/opt/max in profile.\n"
                    .format(trt_util.str_from_tensor(inp, is_shape_tensor),
                            shape),
                    mode=LogMode.ONCE,
                )
                G_LOGGER.warning(
                    "This will cause the shape-tensor to have static values. If this is incorrect, please "
                    "set the range of values for this input shape-tensor.",
                    mode=LogMode.ONCE,
                )
            else:
                # For a regular tensor, replace any dynamic (-1) dimensions.
                shape = util.override_dynamic_shape(inp.shape,
                                                    default_shape_value)
                # Only warn when an override actually happened.
                if shape != inp.shape:
                    G_LOGGER.warning(
                        "{:} | No shapes provided; Will use shape: {:} for min/opt/max in profile.\n"
                        .format(trt_util.str_from_tensor(inp, is_shape_tensor),
                                shape),
                        mode=LogMode.ONCE,
                    )
                    G_LOGGER.warning(
                        "This will cause the tensor to have a static shape. If this is incorrect, please "
                        "set the range of shapes for this input tensor.",
                        mode=LogMode.ONCE,
                    )

            # Use the same shape for min, opt, and max.
            self.add(inp.name, shape, shape, shape)
        return self
Пример #2
0
 def get_static_shape(name, shape):
     """
     Return a fully-static version of `shape` for the input tensor `name`,
     overriding any dynamic dimensions and warning the user when it does so.
     """
     # Static shapes pass straight through untouched.
     if not util.is_shape_dynamic(shape):
         return shape

     static_shape = util.override_dynamic_shape(shape)

     # No warning needed if nothing changed, or if the user supplied
     # metadata for this input themselves.
     if static_shape == shape or name in self.user_input_metadata:
         return static_shape

     if not util.is_valid_shape_override(static_shape, shape):
         G_LOGGER.critical("Input tensor: {:} | Cannot override original shape: {:} to {:}".format(name, shape, static_shape))
     G_LOGGER.warning("Input tensor: {:} | Will generate data of shape: {:}.\n"
                      "If this is incorrect, please set input_metadata "
                      "or provide a custom data loader.".format(name, static_shape), mode=LogMode.ONCE)
     return static_shape
Пример #3
0
    def get_shapes(lst, idx):
        nonlocal default_shapes
        default_shapes = copy.copy(default_shapes)
        if idx < len(lst):
            default_shapes.update(
                args_util.parse_meta(lst[idx], includes_dtype=False))

        # Don't care about dtype, and need to override dynamic dimensions
        shapes = {
            name: util.override_dynamic_shape(shape)
            for name, (_, shape) in default_shapes.items()
        }

        for name, shape in shapes.items():
            if tuple(default_shapes[name].shape) != tuple(shape):
                G_LOGGER.warning(
                    "Input tensor: {:} | For TensorRT profile, overriding dynamic shape: {:} to: {:}"
                    .format(name, default_shapes[name].shape, shape),
                    mode=LogMode.ONCE,
                )

        return shapes