Example 1
        self.uri = uri
        self.compat_tf_versions = compat.get_compat_tf_versions(
            compat_tf_versions)
        self.name = name

        if input_image_shape is None:
            input_image_shape = [224, 224]
        self.input_image_shape = input_image_shape


mobilenet_v2_spec = functools.partial(
    ImageModelSpec,
    uri='https://tfhub.dev/google/tf2-preview/mobilenet_v2/feature_vector/4',
    compat_tf_versions=2,
    name='mobilenet_v2')
mobilenet_v2_spec.__doc__ = util.wrap_doc(ImageModelSpec,
                                          'Creates MobileNet v2 model spec.')
mm_export('image_classifier.MobileNetV2Spec').export_constant(
    __name__, 'mobilenet_v2_spec')

resnet_50_spec = functools.partial(
    ImageModelSpec,
    uri='https://tfhub.dev/google/imagenet/resnet_v2_50/feature_vector/4',
    compat_tf_versions=2,
    name='resnet_50')
resnet_50_spec.__doc__ = util.wrap_doc(ImageModelSpec,
                                       'Creates ResNet 50 model spec.')
mm_export('image_classifier.Resnet50Spec').export_constant(
    __name__, 'resnet_50_spec')

efficientnet_lite0_spec = functools.partial(
    ImageModelSpec,
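
The factories above are consumed through the tflite_model_maker public API. A minimal usage sketch, assuming the standard image_classifier workflow and a hypothetical flower_photos/ directory with one sub-folder per class:

from tflite_model_maker import image_classifier
from tflite_model_maker.image_classifier import DataLoader

# Load and split an image folder (path is a placeholder).
data = DataLoader.from_folder('flower_photos/')
train_data, test_data = data.split(0.9)

# MobileNetV2Spec is the name mobilenet_v2_spec is exported under; calling it
# builds an ImageModelSpec preconfigured for the MobileNet v2 TF Hub module.
model = image_classifier.create(
    train_data, model_spec=image_classifier.MobileNetV2Spec())

model.evaluate(test_data)
model.export(export_dir='.')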
Example 2
            eval_metrics = squad_evaluate_v1_1.evaluate(
                pred_dataset, all_predictions)
        return eval_metrics


mobilebert_classifier_spec = functools.partial(
    BertClassifierModelSpec,
    uri=
    'https://tfhub.dev/google/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT/1',
    is_tf2=False,
    distribution_strategy='off',
    name='MobileBert',
    default_batch_size=48,
)
mobilebert_classifier_spec.__doc__ = util.wrap_doc(
    BertClassifierModelSpec,
    'Creates MobileBert model spec for the text classification task. See also: `tflite_model_maker.text_classifier.BertClassifierSpec`.'
)
mm_export('text_classifier.MobileBertClassifierSpec').export_constant(
    __name__, 'mobilebert_classifier_spec')

mobilebert_qa_spec = functools.partial(
    BertQAModelSpec,
    uri=
    'https://tfhub.dev/google/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT/1',
    is_tf2=False,
    distribution_strategy='off',
    learning_rate=4e-05,
    name='MobileBert',
    default_batch_size=32,
)
mobilebert_qa_spec.__doc__ = util.wrap_doc(
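
mobilebert_qa_spec feeds the question-answer task. A rough sketch of how it would be used, assuming the tflite_model_maker.question_answer API and placeholder SQuAD-format files:

from tflite_model_maker import question_answer
from tflite_model_maker.question_answer import DataLoader

# Calling the factory yields a BertQAModelSpec preconfigured for MobileBERT.
spec = mobilebert_qa_spec()

# File names are placeholders for SQuAD v1.1-format JSON.
train_data = DataLoader.from_squad('train-v1.1.json', spec, is_training=True)
validation_data = DataLoader.from_squad('dev-v1.1.json', spec, is_training=False)

model = question_answer.create(train_data, model_spec=spec)
model.evaluate(validation_data)  # returns metrics such as exact match and F1
model.export(export_dir='.')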
Example 3
              tf.lite.OpsSet.TFLITE_BUILTINS
          ]

      tflite_model = converter.convert()

      with tf.io.gfile.GFile(tflite_filepath, 'wb') as f:
        f.write(tflite_model)


efficientdet_lite0_spec = functools.partial(
    EfficientDetModelSpec,
    model_name='efficientdet-lite0',
    uri='https://tfhub.dev/tensorflow/efficientdet/lite0/feature-vector/1',
)
efficientdet_lite0_spec.__doc__ = util.wrap_doc(
    EfficientDetModelSpec,
    'Creates EfficientDet-Lite0 model spec. See also: `tflite_model_maker.object_detector.EfficientDetSpec`.'
)
mm_export('object_detector.EfficientDetLite0Spec').export_constant(
    __name__, 'efficientdet_lite0_spec')

efficientdet_lite1_spec = functools.partial(
    EfficientDetModelSpec,
    model_name='efficientdet-lite1',
    uri='https://tfhub.dev/tensorflow/efficientdet/lite1/feature-vector/1',
)
efficientdet_lite1_spec.__doc__ = util.wrap_doc(
    EfficientDetModelSpec,
    'Creates EfficientDet-Lite1 model spec. See also: `tflite_model_maker.object_detector.EfficientDetSpec`.'
)
mm_export('object_detector.EfficientDetLite1Spec').export_constant(
    __name__, 'efficientdet_lite1_spec')
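
These EfficientDet-Lite specs plug into the object_detector task in the same way. A sketch under the usual public API, with a placeholder annotations CSV:

from tflite_model_maker import object_detector

# Calling the factory yields an EfficientDetModelSpec for EfficientDet-Lite0.
spec = efficientdet_lite0_spec()

# The CSV path is a placeholder for annotations in the Model Maker CSV format;
# from_csv returns train/validation/test splits.
train_data, validation_data, test_data = object_detector.DataLoader.from_csv(
    'annotations.csv')

model = object_detector.create(
    train_data,
    model_spec=spec,
    batch_size=8,
    train_whole_model=True,
    validation_data=validation_data)

model.evaluate(test_data)
model.export(export_dir='.')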
Example 4
def bert_qa_spec(**kwargs):
    return BertQAModelSpec(**kwargs)


mobilebert_classifier_spec = functools.partial(
    BertClassifierModelSpec,
    uri=
    'https://tfhub.dev/google/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT/1',
    is_tf2=False,
    distribution_strategy='off',
    name='MobileBert',
    default_batch_size=48,
)
mobilebert_classifier_spec.__doc__ = util.wrap_doc(
    BertClassifierModelSpec,
    'Creates MobileBert model spec for the text classification task.')
mm_export('text_classifier.MobileBertClassifierSpec').export_constant(
    __name__, 'mobilebert_classifier_spec')

mobilebert_qa_spec = functools.partial(
    BertQAModelSpec,
    uri=
    'https://tfhub.dev/google/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT/1',
    is_tf2=False,
    distribution_strategy='off',
    learning_rate=4e-05,
    name='MobileBert',
    default_batch_size=32,
)
mobilebert_qa_spec.__doc__ = util.wrap_doc(
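
For the classifier spec, the corresponding entry point is text_classifier. A sketch assuming the public API, with placeholder CSV and column names:

from tflite_model_maker import text_classifier
from tflite_model_maker.text_classifier import DataLoader

# Calling the factory yields a BertClassifierModelSpec for MobileBERT.
spec = mobilebert_classifier_spec()

# File name and column names are placeholders.
train_data = DataLoader.from_csv(
    filename='train.csv',
    text_column='sentence',
    label_column='label',
    model_spec=spec,
    is_training=True)

model = text_classifier.create(train_data, model_spec=spec, epochs=3)
model.export(export_dir='.')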
Example 5
            'eval_top_k': eval_top_k,
        }

    def create_model(self):
        """Creates recommendation model based on params.

    Returns:
      Keras model.
    """
        return _rm.RecommendationModel(self.params)


recommendation_bow_spec = functools.partial(RecommendationSpec,
                                            encoder_type='bow')
recommendation_bow_spec.__doc__ = util.wrap_doc(
    RecommendationSpec,
    'Creates Recommendation Bag-of-Word (BoW) model spec. See also: `tflite_model_maker.recommendation.ModelSpec`.'
)
mm_export('recommendation.BowSpec').export_constant(__name__,
                                                    'recommendation_bow_spec')

recommendation_cnn_spec = functools.partial(RecommendationSpec,
                                            encoder_type='cnn')
recommendation_cnn_spec.__doc__ = util.wrap_doc(
    RecommendationSpec,
    'Creates Recommendation CNN model spec. See also: `tflite_model_maker.recommendation.ModelSpec`.'
)
mm_export('recommendation.CnnSpec').export_constant(__name__,
                                                    'recommendation_cnn_spec')

recommendation_rnn_spec = functools.partial(RecommendationSpec,
                                            encoder_type='rnn')
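
The pattern here, pre-binding constructor arguments with functools.partial, patching __doc__, and exporting the callable under a public name, is easy to see in isolation. A self-contained sketch with a stand-in class (not the real RecommendationSpec):

import functools


class DemoSpec:
  """Toy spec with an encoder_type argument."""

  def __init__(self, encoder_type='bow', hidden_units=16):
    self.encoder_type = encoder_type
    self.hidden_units = hidden_units


# Pre-bind encoder_type; remaining kwargs can still be overridden at call time.
demo_cnn_spec = functools.partial(DemoSpec, encoder_type='cnn')
demo_cnn_spec.__doc__ = 'Creates a demo CNN spec.'  # partial objects accept attributes

spec = demo_cnn_spec(hidden_units=32)
assert spec.encoder_type == 'cnn' and spec.hidden_units == 32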
Example 6
            input_image_shape = [224, 224]
        self.input_image_shape = input_image_shape

    def get_default_quantization_config(self, representative_data):
        """Gets the default quantization configuration."""
        config = configs.QuantizationConfig.for_int8(representative_data)
        return config


mobilenet_v2_spec = functools.partial(
    ImageModelSpec,
    uri='https://tfhub.dev/google/tf2-preview/mobilenet_v2/feature_vector/4',
    compat_tf_versions=2,
    name='mobilenet_v2')
mobilenet_v2_spec.__doc__ = util.wrap_doc(
    ImageModelSpec,
    'Creates MobileNet v2 model spec. See also: `tflite_model_maker.image_classifier.ModelSpec`.'
)
mm_export('image_classifier.MobileNetV2Spec').export_constant(
    __name__, 'mobilenet_v2_spec')

resnet_50_spec = functools.partial(
    ImageModelSpec,
    uri='https://tfhub.dev/google/imagenet/resnet_v2_50/feature_vector/4',
    compat_tf_versions=2,
    name='resnet_50')
resnet_50_spec.__doc__ = util.wrap_doc(
    ImageModelSpec,
    'Creates ResNet 50 model spec. See also: `tflite_model_maker.image_classifier.ModelSpec`.'
)
mm_export('image_classifier.Resnet50Spec').export_constant(
    __name__, 'resnet_50_spec')
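
get_default_quantization_config above wires up the int8 path; applying it looks roughly like this, assuming the public QuantizationConfig helper and reusing a trained model and a test_data split such as the ones in the earlier image-classifier sketch:

from tflite_model_maker.config import QuantizationConfig

# Full-integer quantization needs representative samples; the test split
# stands in for them here (model and test_data come from earlier training).
config = QuantizationConfig.for_int8(representative_data=test_data)

model.export(
    export_dir='.',
    tflite_filename='model_int8.tflite',
    quantization_config=config)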
Example 7
        'lstm_num_units': lstm_num_units,
        'eval_top_k': eval_top_k,
    }

  def create_model(self):
    """Creates recommendation model based on params.

    Returns:
      Keras model.
    """
    return _rm.RecommendationModel(self.params)


recommendation_bow_spec = functools.partial(
    RecommendationSpec, encoder_type='bow')
recommendation_bow_spec.__doc__ = util.wrap_doc(
    RecommendationSpec, 'Creates Recommendation Bag-of-Word (BoW) model spec.')
mm_export('recommendation.BowSpec').export_constant(__name__,
                                                    'recommendation_bow_spec')

recommendation_cnn_spec = functools.partial(
    RecommendationSpec, encoder_type='cnn')
recommendation_cnn_spec.__doc__ = util.wrap_doc(
    RecommendationSpec, 'Creates Recommendation CNN model spec.')
mm_export('recommendation.CnnSpec').export_constant(__name__,
                                                    'recommendation_cnn_spec')

recommendation_rnn_spec = functools.partial(
    RecommendationSpec, encoder_type='rnn')
recommendation_rnn_spec.__doc__ = util.wrap_doc(
    RecommendationSpec, 'Creates Recommendation RNN model spec.')
mm_export('recommendation.RnnSpec').export_constant(__name__,
Example 8
          converter.target_spec.supported_ops += [
              tf.lite.OpsSet.TFLITE_BUILTINS
          ]

      tflite_model = converter.convert()

      with tf.io.gfile.GFile(tflite_filepath, 'wb') as f:
        f.write(tflite_model)


efficientdet_lite0_spec = functools.partial(
    EfficientDetModelSpec,
    model_name='efficientdet-lite0',
    uri='https://tfhub.dev/tensorflow/efficientdet/lite0/feature-vector/1',
)
efficientdet_lite0_spec.__doc__ = util.wrap_doc(
    EfficientDetModelSpec, 'Creates EfficientDet-Lite0 model spec.')
mm_export('object_detector.EfficientDetLite0Spec').export_constant(
    __name__, 'efficientdet_lite0_spec')

efficientdet_lite1_spec = functools.partial(
    EfficientDetModelSpec,
    model_name='efficientdet-lite1',
    uri='https://tfhub.dev/tensorflow/efficientdet/lite1/feature-vector/1',
)
efficientdet_lite1_spec.__doc__ = util.wrap_doc(
    EfficientDetModelSpec, 'Creates EfficientDet-Lite1 model spec.')
mm_export('object_detector.EfficientDetLite1Spec').export_constant(
    __name__, 'efficientdet_lite1_spec')

efficientdet_lite2_spec = functools.partial(
    EfficientDetModelSpec,
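
The top of this snippet shows the tail end of export: build a TFLiteConverter, extend target_spec.supported_ops, convert, and write the flatbuffer with tf.io.gfile. A stripped-down sketch of that pattern for a plain Keras model (the op-set choice here is illustrative, not the exact Model Maker configuration):

import tensorflow as tf


def export_tflite(keras_model, tflite_filepath):
  """Converts a Keras model to TFLite and writes it to tflite_filepath."""
  converter = tf.lite.TFLiteConverter.from_keras_model(keras_model)
  # Permit select TF ops alongside the built-in TFLite op set.
  converter.target_spec.supported_ops = [
      tf.lite.OpsSet.TFLITE_BUILTINS,
      tf.lite.OpsSet.SELECT_TF_OPS,
  ]
  tflite_model = converter.convert()
  with tf.io.gfile.GFile(tflite_filepath, 'wb') as f:
    f.write(tflite_model)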