Example #1
from absl import flags
from tflite_support.metadata_writers import image_classifier
from tflite_support.metadata_writers import writer_utils

ImageClassifierWriter = image_classifier.MetadataWriter


def main(_):
    writer = ImageClassifierWriter.create_for_inference(
        writer_utils.load_file(flags.FLAGS.model_file), [127.5], [127.5],
        [flags.FLAGS.label_file])

    writer_utils.save_file(writer.populate(), flags.FLAGS.output_file)

    print(writer.get_metadata_json())
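A minimal sketch of the flag definitions and entry point that main above assumes; the flag names come from the function itself, while the help strings and the __main__ guard are assumptions:

from absl import app, flags

flags.DEFINE_string('model_file', None, 'Path to the .tflite model to populate.')
flags.DEFINE_string('label_file', None, 'Path to the label file to pack into the model.')
flags.DEFINE_string('output_file', None, 'Where to save the model with metadata.')

if __name__ == '__main__':
    app.run(main)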
Example #2
import json
import tempfile

import tensorflow as tf
import tensorflowjs as tfjs
from fastapi import BackgroundTasks, File, UploadFile
from tflite_support.metadata_writers import audio_classifier
from tflite_support.metadata_writers import writer_utils

AudioClassifierWriter = audio_classifier.MetadataWriter


# Note: unzipFile, cleanup_files, returnFile, preproc_model and input_length
# are helpers defined elsewhere and not shown in this snippet.
async def create_upload_file(type: str,
                             format: str,
                             background_tasks: BackgroundTasks,
                             model: UploadFile = File(...),
                             dataset: UploadFile = File(default=None)):

    print("uploading", flush=True)
    global labels, datapath, instanceReady
    instanceReady = False
    if (type != 'audio' or format != 'tflite'):
        return {'format not supported'}
    model_dir = tempfile.mkdtemp()
    print("### Created " + model_dir)
    data_dir = tempfile.mkdtemp()
    unzipFile(model, model_dir)

    background_tasks.add_task(cleanup_files, model_dir, data_dir)

    with open(model_dir + '/metadata.json') as json_file:
        data = json.load(json_file)
    labels = data['wordLabels']

    print("Generating lables.txt")
    labels_path = model_dir + '/labels.txt'
    with open(labels_path, 'w') as f:
        for idx, label in enumerate(labels):
            f.write("{} {}\n".format(idx, label))
    print('Labels:' + ', '.join(labels), flush=True)

    # specify path to original model and load
    tfjs_model_json_path = model_dir + '/model.json'
    model = tfjs.converters.load_keras_model(tfjs_model_json_path)

    # construct the new model by combining preproc and main classifier
    combined_model = tf.keras.Sequential(name='combined_model')
    combined_model.add(preproc_model)
    combined_model.add(model)
    combined_model.build([None, input_length])
    # save the model as a tflite file
    tflite_output_path = model_dir + '/soundclassifier.tflite'
    converter = tf.lite.TFLiteConverter.from_keras_model(combined_model)
    with open(tflite_output_path, 'wb') as f:
        f.write(converter.convert())

    # add metadata to model
    save_to_path = model_dir + '/soundclassifier_with_metadata.tflite'
    channels = 1
    tm_sample_rate = 44100
    writer = AudioClassifierWriter.create_for_inference(
        writer_utils.load_file(tflite_output_path), tm_sample_rate, channels,
        [labels_path])
    writer_utils.save_file(writer.populate(), save_to_path)
    return returnFile('soundclassifier_with_metadata.tflite', model_dir,
                      data_dir, False)
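The handler above is shown without its route registration. A hedged sketch of how it might be mounted on a FastAPI app (the app object and the route path are assumptions, not part of the original snippet):

from fastapi import FastAPI

app = FastAPI()

# Register the upload handler defined above; the path is illustrative only.
app.post("/upload")(create_upload_file)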
Example #3
    def _export_tflite(
            self,
            tflite_filepath: str,
            quantization_config: configs.QuantizationConfigType = 'default',
            with_metadata: bool = True,
            export_metadata_json_file: bool = False) -> None:
        """Converts the retrained model to tflite format and saves it.

    Args:
      tflite_filepath: File path to save tflite model.
      quantization_config: Configuration for post-training quantization. If
        'default', sets the `quantization_config` by default according to
        `self.model_spec`. If None, exports the float tflite model without
        quantization.
      with_metadata: Whether the output tflite model contains metadata.
      export_metadata_json_file: Whether to export metadata in json file. If
        True, export the metadata in the same directory as tflite model.Used
        only if `with_metadata` is True.
    """
        if quantization_config == 'default':
            quantization_config = self.model_spec.get_default_quantization_config(
                self.representative_data)

        self.model_spec.export_tflite(self.model, tflite_filepath,
                                      quantization_config)

        if with_metadata:
            with tempfile.TemporaryDirectory() as temp_dir:
                tf.compat.v1.logging.info(
                    'Label file is inside the TFLite model with metadata.')
                label_filepath = os.path.join(temp_dir, 'labelmap.txt')
                self._export_labels(label_filepath)
                writer = metadata_writer.MetadataWriter.create_for_inference(
                    writer_utils.load_file(tflite_filepath),
                    [self.model_spec.config.mean_rgb],
                    [self.model_spec.config.stddev_rgb], [label_filepath])
                writer_utils.save_file(writer.populate(), tflite_filepath)

                if export_metadata_json_file:
                    metadata_json = writer.get_populated_metadata_json()
                    export_json_path = os.path.splitext(
                        tflite_filepath)[0] + '.json'
                    with open(export_json_path, 'w') as f:
                        f.write(metadata_json)
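A hedged call sketch for the method above; `task` stands in for whichever Model Maker task object provides `_export_tflite`, and the file paths are illustrative:

# `task` and the paths below are assumptions for illustration only.
task._export_tflite(
    tflite_filepath='exported/model.tflite',
    quantization_config='default',    # use the model spec's default post-training quantization
    with_metadata=True,               # pack the label file and metadata into the .tflite
    export_metadata_json_file=True)   # also write exported/model.json alongside it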
Example #4
    def __init__(self, tflite_filepath, **kwargs):
        self._model = writer_utils.load_file(tflite_filepath)
        self._general_md = md_info.GeneralMd(**kwargs)
        self._inputs = []
        self._outputs = []
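For context, `md_info` here is presumably `tflite_support.metadata_writers.metadata_info` imported under an alias; a sketch of the kind of kwargs `GeneralMd` accepts, with made-up field values for illustration:

from tflite_support.metadata_writers import metadata_info as md_info

# Illustrative values only; GeneralMd records general model information.
general_md = md_info.GeneralMd(
    name="MyClassifier",
    version="v1",
    description="Identifies the most prominent object in an image.",
    author="TensorFlow",
    licenses="Apache License. Version 2.0")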
Example #5
from tflite_support.metadata_writers import object_detector
from tflite_support.metadata_writers import writer_utils

writer = object_detector.MetadataWriter.create_for_inference(
    writer_utils.load_file("od_model.tflite"),
    input_norm_mean=[0],
    input_norm_std=[255],
    label_file_paths=["class-names.txt"])
writer_utils.save_file(writer.populate(), "detect.tflite")
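As in the other examples, the metadata built by the writer can be printed as JSON for inspection; this line is a usage note rather than part of the original snippet:

# Print the metadata that will be written into detect.tflite.
print(writer.get_metadata_json())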
Example #6
from tflite_support.metadata_writers import image_classifier
from tflite_support.metadata_writers import writer_utils

ImageClassifierWriter = image_classifier.MetadataWriter
_MODEL_PATH = "./model.tflite"
# Task Library expects label files that are in the same format as the one below.
_LABEL_FILE = "./label.txt"
_SAVE_TO_PATH = "./model_metadata_added.tflite"
# Normalization parameters are required when preprocessing the image. They are
# optional if the image pixel values are in the range [0, 255] and the input
# tensor is quantized to uint8. See the introduction to normalization and
# quantization parameters below for more details:
# https://www.tensorflow.org/lite/convert/metadata#normalization_and_quantization_parameters
_INPUT_NORM_MEAN = 0.0
_INPUT_NORM_STD = 1.0

# Create the metadata writer.
writer = ImageClassifierWriter.create_for_inference(
    writer_utils.load_file(_MODEL_PATH), [_INPUT_NORM_MEAN], [_INPUT_NORM_STD],
    [_LABEL_FILE])

# Verify the metadata generated by metadata writer.
print(writer.get_metadata_json())

# Populate the metadata into the model.
writer_utils.save_file(writer.populate(), _SAVE_TO_PATH)
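To double-check the result on disk, the populated model can be read back with the metadata displayer from the same package (a verification sketch, not part of the original example):

from tflite_support import metadata

displayer = metadata.MetadataDisplayer.with_model_file(_SAVE_TO_PATH)
# Show the embedded metadata and the files (e.g. the label file) packed into it.
print(displayer.get_metadata_json())
print(displayer.get_packed_associated_file_list())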
        print("Activate Multi GPU")
        for gpu in gpus:
            tf.config.experimental.set_memory_growth(gpu, True)
        strategy = tf.distribute.MirroredStrategy(cross_device_ops=tf.distribute.HierarchicalCopyAllReduce())
    except RuntimeError as e:
        print(e)

else:
    try:
        print("Activate Sigle GPU")
        tf.config.experimental.set_memory_growth(gpus[0], True)
        strategy = tf.distribute.experimental.CentralStorageStrategy()
    except RuntimeError as e:
        print(e)
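# Note: `strategy` is created above but not used further in this snippet; in a
# full training script it would typically wrap model building and compilation
# via `with strategy.scope(): ...`.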


label_file_paths='/data/datasets/traffic_sign/labels.txt'
path = "/home/barcelona/tensorflow/models/research/object_detection/custom/models/traffic_sign/21_06_18"
saved_model_dir = f"{path}/saved_model"

converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir, signature_keys=['serving_default'])
converter.optimizations = [tf.lite.Optimize.DEFAULT]
converter.experimental_new_converter = True
converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS, tf.lite.OpsSet.SELECT_TF_OPS]
tflite_model = converter.convert()

with tf.io.gfile.GFile(f'{path}/custom.tflite', 'wb') as f:
    f.write(tflite_model)

writer = object_detector.MetadataWriter.create_for_inference(
    writer_utils.load_file(f'{path}/custom.tflite'),
    input_norm_mean=[0],
    input_norm_std=[255],
    label_file_paths=[label_file_paths])
writer_utils.save_file(writer.populate(), f'{path}/custom.tflite')
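An optional sanity check, not in the original snippet: load the populated model with the TFLite interpreter and print its tensor details. Because the model was converted with SELECT_TF_OPS, the interpreter from the full TensorFlow package (which links the Flex ops) is typically required:

interpreter = tf.lite.Interpreter(model_path=f'{path}/custom.tflite')
interpreter.allocate_tensors()
print(interpreter.get_input_details())
print(interpreter.get_output_details())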