def __quantize_model(converter: tensorflow.lite.TFLiteConverter,
                     model: Detector) -> Any:
    """Run full-integer (uint8) quantization on *model* via *converter*.

    Configures the converter for edge-TPU-compatible INT8 conversion using a
    representative dataset built from IMAGES_PATH / ANNOTATIONS_PATH, then
    performs the conversion.

    Returns:
        The converted TFLite flatbuffer, or ``None`` when conversion fails
        (the error is logged via ``print_debug`` instead of being raised).
    """
    print_debug('\nConverting model, this will take some time...')

    # Get sample data to be used during conversion
    image_files: List[str] = [
        os.path.abspath(os.path.join(IMAGES_PATH, image))
        for image in os.listdir(IMAGES_PATH)
    ]
    annotation_files: List[str] = [
        os.path.abspath(os.path.join(ANNOTATIONS_PATH, annotations))
        for annotations in os.listdir(ANNOTATIONS_PATH)
    ]

    try:
        # Set optimizations to perform on model to be compatible with edge TPUs
        converter.optimizations = [tensorflow.lite.Optimize.DEFAULT]
        converter.representative_dataset = lambda: __input_data_generator(
            model, image_files, annotation_files)

        # Force full-integer ops with uint8 input/output tensors.
        converter.target_spec.supported_ops = [
            tensorflow.lite.OpsSet.TFLITE_BUILTINS_INT8
        ]
        converter.inference_input_type = tensorflow.uint8
        converter.inference_output_type = tensorflow.uint8

        # Start conversion
        return converter.convert()
    except Exception as exception:  # pylint: disable=broad-except
        print_debug('Error: Could not convert model to TFLite')
        print_debug(str(exception))
        # Deliberate best-effort: signal failure to the caller with an
        # explicit None instead of propagating the exception.
        return None
# Example #2
# 0
  def _set_converter_options_for_calibration(
      self, converter: tf.lite.TFLiteConverter) -> tf.lite.TFLiteConverter:
    """Verify converter options and set required experimental options."""
    if not converter.optimizations:
      converter.optimizations = [tf.lite.Optimize.DEFAULT]
    if not converter.representative_dataset:
      raise ValueError('converter object must set representative_dataset')

    converter.experimental_mlir_quantizer = True
    converter._experimental_calibrate_only = True  # pylint: disable=protected-access
    return converter
# Example #3
# 0
 def _set_converter_options_for_float(
         self,
         converter: tf.lite.TFLiteConverter) -> tf.lite.TFLiteConverter:
     """Verify converter options and set required experimental options."""
     if converter.optimizations:
         converter.optimizations = []
     return converter
# Example #4
# 0
def get_tflite_model(converter: tf.lite.TFLiteConverter, config: Config):
    """Configure *converter* from *config* and run the TFLite conversion.

    Args:
        converter: converter whose options are mutated in place.
        config: holds the optimization flag, representative dataset,
            supported ops/types, and inference input/output types.

    Returns:
        The converted TFLite model produced by ``converter.convert()``.
    """
    if config.optimization:
        # NOTE(review): depending on the TF version, `optimizations` may be
        # a set rather than a list (note `supported_ops` below is used with
        # clear()/update()), so `.append` on it is not safe. Rebuild as a
        # list, preserving any entries already configured.
        optimizations = list(converter.optimizations or [])
        optimizations.append(tf.lite.Optimize.DEFAULT)
        converter.optimizations = optimizations

    converter.representative_dataset = config.representative_dataset

    if config.supported_ops:
        # Replace — not merge — the supported op set.
        converter.target_spec.supported_ops.clear()
        converter.target_spec.supported_ops.update(config.supported_ops)

    if config.supported_types:
        converter.target_spec.supported_types.extend(config.supported_types)

    converter.inference_input_type = config.inference_input_type
    converter.inference_output_type = config.inference_output_type

    tflite_model = converter.convert()
    return tflite_model