Example #1
    def __init__(self,
                 model_path=None,
                 model_content=None,
                 experimental_delegates=None,
                 num_threads=None):
        """Constructor.

        Exactly one of `model_path` or `model_content` must be given; the
        underlying interpreter wrapper is created from whichever is supplied.

        Args:
          model_path: Path to TF-Lite Flatbuffer file.
          model_content: Content of model.
          experimental_delegates: Experimental. Subject to change. List of
            [TfLiteDelegate](https://www.tensorflow.org/lite/performance/delegates)
            objects returned by lite.load_delegate().
          num_threads: Sets the number of threads used by the interpreter and
            available to CPU kernels. If not set, the interpreter will use an
            implementation-dependent default number of threads. Currently,
            only a subset of kernels, such as conv, support multi-threading.

        Raises:
          ValueError: If the interpreter was unable to create.
        """
        if not hasattr(self, '_custom_op_registerers'):
            self._custom_op_registerers = []

        has_path = bool(model_path)
        has_content = bool(model_content)
        # Guard clauses: the two inputs are mutually exclusive and required.
        if has_path and has_content:
            raise ValueError(
                'Can\'t both provide `model_path` and `model_content`')
        if not has_path and not has_content:
            raise ValueError(
                '`model_path` or `model_content` must be specified.')

        if has_path:
            self._interpreter = _interpreter_wrapper.CreateWrapperFromFile(
                model_path, self._custom_op_registerers)
            if not self._interpreter:
                raise ValueError('Failed to open {}'.format(model_path))
        else:
            # Keep a reference so the buffer outlives the wrapper; Python
            # strings are immutable, so PyString_XX functions always return
            # the same pointer.
            self._model_content = model_content
            self._interpreter = _interpreter_wrapper.CreateWrapperFromBuffer(
                model_content, self._custom_op_registerers)

        if num_threads is not None:
            if not isinstance(num_threads, int):
                raise ValueError('type of num_threads should be int')
            if num_threads < 1:
                raise ValueError('num_threads should >= 1')
            self._interpreter.SetNumThreads(num_threads)

        # Each delegate is a wrapper that owns the delegates loaded as
        # plugins. The interpreter wrapper uses them, so keep them in a list
        # whose lifetime is at least as long as the interpreter wrapper's.
        self._delegates = []
        if experimental_delegates:
            self._delegates = experimental_delegates
            for plugin in self._delegates:
                self._interpreter.ModifyGraphWithDelegate(
                    plugin._get_native_delegate_pointer())  # pylint: disable=protected-access
Example #2
    def __init__(self,
                 model_path=None,
                 model_content=None,
                 experimental_delegates=None):
        """Constructor.

        Builds the native interpreter wrapper from either a model file on
        disk or an in-memory model buffer (exactly one must be provided).

        Args:
          model_path: Path to TF-Lite Flatbuffer file.
          model_content: Content of model.
          experimental_delegates: Experimental. Subject to change. List of
            [TfLiteDelegate](https://www.tensorflow.org/lite/performance/delegates)
            objects returned by lite.load_delegate().

        Raises:
          ValueError: If the interpreter was unable to create.
        """
        if not hasattr(self, '_custom_op_registerers'):
            self._custom_op_registerers = []

        # Reject invalid combinations up front.
        if model_path and model_content:
            raise ValueError(
                'Can\'t both provide `model_path` and `model_content`')
        if not (model_path or model_content):
            raise ValueError(
                '`model_path` or `model_content` must be specified.')

        if model_content:
            # Take a reference so the pointer remains valid: Python strings
            # are immutable, hence PyString_XX functions always return the
            # same pointer for this object.
            self._model_content = model_content
            self._interpreter = _interpreter_wrapper.CreateWrapperFromBuffer(
                model_content, self._custom_op_registerers)
        else:
            self._interpreter = _interpreter_wrapper.CreateWrapperFromFile(
                model_path, self._custom_op_registerers)
            if not self._interpreter:
                raise ValueError('Failed to open {}'.format(model_path))

        # Hold the delegate wrappers (which own the plugin delegates) in a
        # list so their lifetime is preserved at least as long as the
        # interpreter wrapper that uses them.
        self._delegates = []
        if experimental_delegates:
            self._delegates = experimental_delegates
            for plugin in self._delegates:
                self._interpreter.ModifyGraphWithDelegate(
                    plugin._get_native_delegate_pointer())  # pylint: disable=protected-access
Example #3
  def __init__(self,
               model_path=None,
               model_content=None,
               experimental_delegates=None,
               num_threads=None,
               experimental_op_resolver_type=OpResolverType.AUTO,
               experimental_preserve_all_tensors=False):
    """Constructor.

    Args:
      model_path: Path to TF-Lite Flatbuffer file.
      model_content: Content of model.
      experimental_delegates: Experimental. Subject to change. List of
        [TfLiteDelegate](https://www.tensorflow.org/lite/performance/delegates)
          objects returned by lite.load_delegate().
      num_threads: Sets the number of threads used by the interpreter and
        available to CPU kernels. If not set, the interpreter will use an
        implementation-dependent default number of threads. Currently, only a
        subset of kernels, such as conv, support multi-threading. num_threads
        should be >= -1. Setting num_threads to 0 has the effect to disable
        multithreading, which is equivalent to setting num_threads to 1. If set
        to the value -1, the number of threads used will be
        implementation-defined and platform-dependent.
      experimental_op_resolver_type: The op resolver used by the interpreter. It
        must be an instance of OpResolverType. By default, we use the built-in
        op resolver which corresponds to tflite::ops::builtin::BuiltinOpResolver
        in C++.
      experimental_preserve_all_tensors: If true, then intermediate tensors
        used during computation are preserved for inspection. Otherwise, reading
        intermediate tensors provides undefined values.

    Raises:
      ValueError: If the interpreter was unable to create.
    """
    if not hasattr(self, '_custom_op_registerers'):
      self._custom_op_registerers = []

    op_resolver_id = _get_op_resolver_id(experimental_op_resolver_type)
    if op_resolver_id is None:
      raise ValueError('Unrecognized passed in op resolver type: {}'.format(
          experimental_op_resolver_type))

    # The split of registerers into names vs. callables is identical for both
    # creation paths, so compute it once instead of duplicating it per branch.
    custom_op_registerers_by_name = [
        x for x in self._custom_op_registerers if isinstance(x, str)
    ]
    custom_op_registerers_by_func = [
        x for x in self._custom_op_registerers if not isinstance(x, str)
    ]

    if model_path and not model_content:
      self._interpreter = (
          _interpreter_wrapper.CreateWrapperFromFile(
              model_path, op_resolver_id, custom_op_registerers_by_name,
              custom_op_registerers_by_func,
              experimental_preserve_all_tensors))
      if not self._interpreter:
        raise ValueError('Failed to open {}'.format(model_path))
    elif model_content and not model_path:
      # Take a reference, so the pointer remains valid.
      # Since python strings are immutable then PyString_XX functions
      # will always return the same pointer.
      self._model_content = model_content
      self._interpreter = (
          _interpreter_wrapper.CreateWrapperFromBuffer(
              model_content, op_resolver_id, custom_op_registerers_by_name,
              custom_op_registerers_by_func,
              experimental_preserve_all_tensors))
    elif not model_content and not model_path:
      raise ValueError('`model_path` or `model_content` must be specified.')
    else:
      raise ValueError('Can\'t both provide `model_path` and `model_content`')

    if num_threads is not None:
      if not isinstance(num_threads, int):
        raise ValueError('type of num_threads should be int')
      # Bug fix: the documented contract allows num_threads >= -1 (0 disables
      # multithreading, -1 means implementation-defined), but the old check
      # rejected everything below 1. Validate against the docstring instead.
      if num_threads < -1:
        raise ValueError('num_threads should be >= -1')
      self._interpreter.SetNumThreads(num_threads)

    # Each delegate is a wrapper that owns the delegates that have been loaded
    # as plugins. The interpreter wrapper will be using them, but we need to
    # hold them in a list so that the lifetime is preserved at least as long as
    # the interpreter wrapper.
    self._delegates = []
    if experimental_delegates:
      self._delegates = experimental_delegates
      for delegate in self._delegates:
        self._interpreter.ModifyGraphWithDelegate(
            delegate._get_native_delegate_pointer())  # pylint: disable=protected-access
    self._signature_defs = self.get_signature_list()

    self._metrics = metrics.TFLiteMetrics()
    self._metrics.increase_counter_interpreter_creation()