def test_get_model(mocker, hub_url, input_shape, kwargs):
    """get_model should return the Sequential built around the TF Hub layer.

    Both the hub layer and the Sequential constructor are patched, so the
    test asserts only the wiring, not any real TensorFlow behavior.
    """
    layer_stub = mocker.Mock(tensorflow_hub.keras_layer.KerasLayer)
    sequential_stub = mocker.Mock(
        tensorflow.python.keras.engine.sequential.Sequential)
    mocker.patch('tensorflow_hub.KerasLayer', return_value=layer_stub)
    mocker.patch('tensorflow.keras.Sequential', return_value=sequential_stub)

    result = extract_from_tfhub.get_model(hub_url, input_shape, **kwargs)

    # The patched Sequential constructor returns sequential_stub, so the
    # function's return value must be exactly that object.
    assert result == sequential_stub
def main():
    """Export the MobileNetV2 plant classifier plus its serving artifacts.

    Steps: load (or fetch from TF Hub) the model, smoke-test the full
    preprocess -> predict -> softmax pipeline on a sample image, write the
    SavedModel, pickle the transformers, and emit the interface YAML.
    """
    os.makedirs(SAVEDMODEL_DIR, exist_ok=True)

    # Reuse a cached SavedModel when one is already on disk; otherwise
    # download the architecture from TF Hub.
    if os.path.exists(PB_FILE):
        print(f"saved model {SAVEDMODEL_DIR} found")
        model = tf.keras.models.load_model(SAVEDMODEL_DIR)
    else:
        print(f"saved model {SAVEDMODEL_DIR} not found")
        model = get_model(HUB_URL, (224, 224, 3))

    preprocessor = TFImagePreprocessTransformer(
        image_size=(224, 224), prediction_shape=(1, 224, 224, 3))
    postprocessor = SoftmaxTransformer()

    # Sanity-check the end-to-end pipeline before exporting anything.
    validate(Image.open(SAMPLE_IMAGE), preprocessor, model, postprocessor)
    tf.saved_model.save(model, SAVEDMODEL_DIR)

    modelname = "mobilenetv2_plant"
    preprocess_filepath = os.path.join(
        MODEL_DIR, f"{modelname}_preprocess_transformer.pkl")
    postprocess_filepath = os.path.join(
        MODEL_DIR, f"{modelname}_softmax_transformer.pkl")
    dump_sklearn(preprocessor, preprocess_filepath)
    dump_sklearn(postprocessor, postprocess_filepath)

    # Describe the three-stage serving pipeline:
    # sklearn preprocess -> TF Serving model -> sklearn softmax.
    save_interface(
        modelname,
        os.path.join(MODEL_DIR, f"{modelname}.yaml"),
        [1, 224, 224, 3],
        "float32",
        [1, 2102],
        "float32",
        DATA_TYPE.IMAGE,
        [{preprocess_filepath: MODEL_RUNTIME.SKLEARN},
         {SAVEDMODEL_DIR: MODEL_RUNTIME.TF_SERVING},
         {postprocess_filepath: MODEL_RUNTIME.SKLEARN}],
        PREDICTION_TYPE.CLASSIFICATION,
        "src.app.ml.mobilenetv2_plant.mobilenetv2_predictor",
        label_filepath=LABEL_FILEPATH,
        model_spec_name="mobilenetv2_plant",
        model_spec_signature_name="serving_default",
        input_name="input_1",
        output_name="keras_layer",
    )
def main():
    """Export the InceptionV3 classifier plus its serving artifacts.

    Steps: load (or fetch from TF Hub) the model, smoke-test the full
    preprocess -> predict -> softmax pipeline on a sample image, write the
    SavedModel, pickle the transformers, and emit the interface YAML.
    """
    os.makedirs(SAVEDMODEL_DIR, exist_ok=True)

    # Reuse a cached SavedModel when one is already on disk; otherwise
    # download the architecture from TF Hub.
    if os.path.exists(PB_FILE):
        print(f'saved model {SAVEDMODEL_DIR} found')
        model = tf.keras.models.load_model(SAVEDMODEL_DIR)
    else:
        print(f'saved model {SAVEDMODEL_DIR} not found')
        model = get_model(HUB_URL, (299, 299, 3))

    preprocessor = TFImagePreprocessTransformer()
    postprocessor = SoftmaxTransformer()

    # Sanity-check the end-to-end pipeline before exporting anything.
    validate(Image.open(SAMPLE_IMAGE), preprocessor, model, postprocessor)
    tf.saved_model.save(model, SAVEDMODEL_DIR)

    modelname = 'inceptionv3'
    preprocess_filepath = os.path.join(
        MODEL_DIR, f'{modelname}_preprocess_transformer.pkl')
    postprocess_filepath = os.path.join(
        MODEL_DIR, f'{modelname}_softmax_transformer.pkl')
    dump_sklearn(preprocessor, preprocess_filepath)
    dump_sklearn(postprocessor, postprocess_filepath)

    # Describe the three-stage serving pipeline:
    # sklearn preprocess -> TF Serving model -> sklearn softmax.
    save_interface(
        modelname,
        os.path.join(MODEL_DIR, f'{modelname}.yaml'),
        [1, 299, 299, 3],
        'float32',
        [1, 1001],
        'float32',
        DATA_TYPE.IMAGE,
        [{preprocess_filepath: MODEL_RUNTIME.SKLEARN},
         {SAVEDMODEL_DIR: MODEL_RUNTIME.TF_SERVING},
         {postprocess_filepath: MODEL_RUNTIME.SKLEARN}],
        PREDICTION_TYPE.CLASSIFICATION,
        'src.app.ml.inceptionv3.inceptionv3_predictor',
        label_filepath=LABEL_FILEPATH,
        model_spec_name='inceptionv3',
        model_spec_signature_name='serving_default',
        input_name='input_1',
        output_name='keras_layer')