Example #1
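These examples are drawn from the TPU EfficientNet export code and assume its surrounding module context. A minimal sketch of that context, under the assumption that imagenet_input is the project-local input pipeline module and that flags such as --model_name and --input_image_size are defined elsewhere in the same script:

import os

from absl import flags
from absl import logging
import tensorflow.compat.v1 as tf  # TF1-style API (tf.logging, tf.gfile, etc.)

import imagenet_input  # Project-local module providing build_image_serving_input_fn.

FLAGS = flags.FLAGS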
def export(est, export_dir, input_image_size=None):
    """Export graph to SavedModel and TensorFlow Lite.

  Args:
    est: estimator instance.
    export_dir: string, exporting directory.
    input_image_size: int, input image size.

  Raises:
    ValueError: the export directory path is not specified.
  """
    if not export_dir:
        raise ValueError('The export directory path is not specified.')

    if not input_image_size:
        input_image_size = FLAGS.input_image_size
    is_cond_conv = FLAGS.model_name.startswith('efficientnet-condconv')
    batch_size = 1 if is_cond_conv else None  # Use fixed batch size for condconv.

    logging.info('Starting to export model.')
    if (FLAGS.model_name.startswith('efficientnet-lite')
            or FLAGS.model_name.startswith('efficientnet-edgetpu')):
        # Lite and EdgeTPU models use bilinear resizing for easier post-quantization.
        resize_method = tf.image.ResizeMethod.BILINEAR
    else:
        resize_method = None
    image_serving_input_fn = imagenet_input.build_image_serving_input_fn(
        input_image_size, batch_size=batch_size, resize_method=resize_method)
    est.export_saved_model(export_dir_base=export_dir,
                           serving_input_receiver_fn=image_serving_input_fn)
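est.export_saved_model writes the SavedModel into a timestamped subdirectory of export_dir. A minimal sketch of loading it back for inspection with the TF1 loader API (the timestamped path here is a hypothetical example):

import tensorflow.compat.v1 as tf

with tf.Session(graph=tf.Graph()) as sess:
    # Load the exported SavedModel under the 'serve' tag.
    meta_graph = tf.saved_model.loader.load(
        sess, [tf.saved_model.tag_constants.SERVING],
        '/tmp/efficientnet_export/1578450434')  # Hypothetical timestamped path.
    # Print the serving signature (input/output tensor names and shapes).
    print(meta_graph.signature_def['serving_default'])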
Example #2
def export(est, export_dir, post_quantize=True):
    """Export graph to SavedModel and TensorFlow Lite.

  Args:
    est: estimator instance.
    export_dir: string, exporting directory.
    post_quantize: boolean, whether to quantize model checkpoint after training.

  Raises:
    ValueError: the export directory path is not specified.
  """
    if not export_dir:
        raise ValueError('The export directory path is not specified.')
    # The guide to serving an exported TensorFlow model is at:
    #    https://www.tensorflow.org/serving/serving_basic
    image_serving_input_fn = imagenet_input.build_image_serving_input_fn(
        FLAGS.input_image_size)

    tf.logging.info('Starting to export model.')
    subfolder = est.export_saved_model(
        export_dir_base=export_dir,
        serving_input_receiver_fn=image_serving_input_fn)

    tf.logging.info('Starting to export TFLite.')
    converter = tf.lite.TFLiteConverter.from_saved_model(
        subfolder.decode(),  # export_saved_model returns the path as bytes.
        input_arrays=['truediv'],
        output_arrays=['logits'])
    tflite_model = converter.convert()
    tflite_file = os.path.join(export_dir, FLAGS.model_name + '.tflite')
    tf.gfile.GFile(tflite_file, 'wb').write(tflite_model)

    if post_quantize:
        tf.logging.info('Starting to export quantized TFLite.')
        converter = tf.lite.TFLiteConverter.from_saved_model(
            subfolder.decode(),
            input_arrays=['truediv'],
            output_arrays=['logits'])
        converter.post_training_quantize = True
        quant_tflite_model = converter.convert()
        quant_tflite_file = os.path.join(
            export_dir, FLAGS.model_name + '_postquant.tflite')
        tf.gfile.GFile(quant_tflite_file, 'wb').write(quant_tflite_model)
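A minimal sketch of sanity-checking one of the exported .tflite files with the standard tf.lite.Interpreter API (the model path is a hypothetical example):

import numpy as np
import tensorflow as tf

interpreter = tf.lite.Interpreter(
    model_path='/tmp/efficientnet_export/efficientnet-b0.tflite')  # Hypothetical path.
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# Feed a dummy image matching the serving input's shape and dtype.
dummy = np.zeros(input_details[0]['shape'], dtype=input_details[0]['dtype'])
interpreter.set_tensor(input_details[0]['index'], dummy)
interpreter.invoke()
logits = interpreter.get_tensor(output_details[0]['index'])
print(logits.shape)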
Example #3
def export(est, export_dir, params, post_quantize=True):
    """Export graph to SavedModel and TensorFlow Lite.

  Args:
    est: estimator instance.
    export_dir: string, exporting directory.
    params: `ParamsDict` passed to the model from the TPUEstimator.
    post_quantize: boolean, whether to quantize model checkpoint after training.

  Raises:
    ValueError: the export directory path is not specified.
  """
    if not export_dir:
        raise ValueError('The export directory path is not specified.')
    # The guide to serving an exported TensorFlow model is at:
    #    https://www.tensorflow.org/serving/serving_basic
    image_serving_input_fn = imagenet_input.build_image_serving_input_fn(
        params.input_image_size)
    tf.logging.info('Starting to export model.')
    subfolder = est.export_saved_model(
        export_dir_base=export_dir,
        serving_input_receiver_fn=image_serving_input_fn)

    tf.logging.info('Starting to export TFLite.')
    converter = tf.lite.TFLiteConverter.from_saved_model(
        subfolder.decode(),  # export_saved_model returns the path as bytes.
        input_arrays=['truediv'], output_arrays=['logits'])
    if params.quantized_training:
        # Export quantized TFLite directly when the model was trained with
        # quantized ops.
        converter.inference_type = tf.uint8
        # (mean, std) for the input: real_value = (quantized_value - mean) / std.
        converter.quantized_input_stats = {'truediv': (0., 2.)}
    tflite_model = converter.convert()
    tflite_file = os.path.join(export_dir, params.model_name + '.tflite')
    tf.gfile.GFile(tflite_file, 'wb').write(tflite_model)

    if post_quantize:
        tf.logging.info('Starting to export quantized TFLite.')
        converter = tf.lite.TFLiteConverter.from_saved_model(
            subfolder.decode(), input_arrays=['truediv'], output_arrays=['logits'])
        converter.post_training_quantize = True
        quant_tflite_model = converter.convert()
        quant_tflite_file = os.path.join(
            export_dir, params.model_name + '_postquant.tflite')
        tf.gfile.GFile(quant_tflite_file, 'wb').write(quant_tflite_model)
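For the quantized-training branch above, quantized_input_stats maps each input array name to a (mean, std) pair, and TFLite recovers real values as real = (quantized - mean) / std. A tiny sketch of what the (0., 2.) stats imply for a uint8 input (the sample values are chosen purely for illustration):

import numpy as np

mean, std = 0., 2.  # Matches quantized_input_stats['truediv'] above.
quantized = np.array([0, 128, 255], dtype=np.uint8)
real = (quantized.astype(np.float32) - mean) / std
print(real)  # [0.0, 64.0, 127.5]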
Example #4
def export(est, export_dir, input_image_size=None):
    """Export graph to SavedModel and TensorFlow Lite.
  Args:
    est: estimator instance.
    export_dir: string, exporting directory.
    input_image_size: int, input image size.
  Raises:
    ValueError: the export directory path is not specified.
  """
    if not export_dir:
        raise ValueError('The export directory path is not specified.')

    if not input_image_size:
        input_image_size = FLAGS.input_image_size

    tf.logging.info('Starting to export model.')
    image_serving_input_fn = imagenet_input.build_image_serving_input_fn(
        input_image_size)
    est.export_saved_model(export_dir_base=export_dir,
                           serving_input_receiver_fn=image_serving_input_fn)
Example #5
def export(est, export_dir, input_image_size=None):
    """Export graph to SavedModel and TensorFlow Lite.

  Args:
    est: estimator instance.
    export_dir: string, exporting directory.
    input_image_size: int, input image size.

  Raises:
    ValueError: the export directory path is not specified.
  """
    if not export_dir:
        raise ValueError('The export directory path is not specified.')

    if not input_image_size:
        input_image_size = FLAGS.input_image_size
    is_cond_conv = FLAGS.model_name.startswith('efficientnet-condconv')
    batch_size = 1 if is_cond_conv else None  # Use fixed batch size for condconv.

    logging.info('Starting to export model.')
    image_serving_input_fn = imagenet_input.build_image_serving_input_fn(
        input_image_size, batch_size=batch_size)
    est.export_saved_model(export_dir_base=export_dir,
                           serving_input_receiver_fn=image_serving_input_fn)
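A hypothetical call site for this variant, assuming est is a TPUEstimator constructed earlier in the training script:

# 'est' is assumed to be a tf.estimator.tpu.TPUEstimator built elsewhere.
export(est, export_dir='/tmp/efficientnet_export', input_image_size=224)
# For 'efficientnet-condconv-*' model names the serving graph is exported with
# a fixed batch size of 1; other models keep a dynamic batch dimension.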