Example #1
# Assumed imports (not shown in the snippet); the *_KEY constants and
# get_logger are project-level names.
import tensorflow as tf
from tensorflow.python.lib.io.file_io import delete_recursively, is_directory
from tensorflow.python.saved_model.builder import SavedModelBuilder
from tensorflow.python.saved_model.signature_constants import DEFAULT_SERVING_SIGNATURE_DEF_KEY
from tensorflow.python.saved_model.signature_def_utils import predict_signature_def
from tensorflow.python.saved_model.tag_constants import SERVING


def save_inference_model(export_dir,
                         inference_model,
                         session=None,
                         replace=True):
    if session is None:
        session = tf.get_default_session()
    assert session is not None
    if replace and is_directory(export_dir):
        get_logger().info('replacing %s', export_dir)
        delete_recursively(export_dir)
    prediction_signature = predict_signature_def(
        inputs={INPUTS_KEY: inference_model.inputs_tensor},
        outputs={
            k: v
            for k, v in {
                OUTPUTS_KEY: inference_model.outputs_tensor,
                LABELS_KEY: inference_model.labels_tensor,
                COLORS_KEY: inference_model.colors_tensor
            }.items() if v is not None
        })
    signature_def_map = {
        DEFAULT_SERVING_SIGNATURE_DEF_KEY: prediction_signature
    }
    legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
    builder = SavedModelBuilder(export_dir)
    builder.add_meta_graph_and_variables(session, [SERVING],
                                         signature_def_map=signature_def_map,
                                         legacy_init_op=legacy_init_op)
    builder.save()
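
Once exported, the SavedModel can be reloaded through its serving signature. A
minimal sketch, assuming a hypothetical export_dir of '/tmp/inference_model'
and that INPUTS_KEY and OUTPUTS_KEY resolve to 'inputs' and 'outputs':

import tensorflow as tf
from tensorflow.python.saved_model.loader import load
from tensorflow.python.saved_model.signature_constants import DEFAULT_SERVING_SIGNATURE_DEF_KEY
from tensorflow.python.saved_model.tag_constants import SERVING

with tf.Session(graph=tf.Graph()) as sess:
    # load() returns the MetaGraphDef; its signature_def map names the tensors
    meta_graph_def = load(sess, [SERVING], '/tmp/inference_model')
    signature = meta_graph_def.signature_def[DEFAULT_SERVING_SIGNATURE_DEF_KEY]
    input_name = signature.inputs['inputs'].name
    output_name = signature.outputs['outputs'].name
    # outputs = sess.run(output_name, feed_dict={input_name: batch})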
Example #2
# Assumed imports (the original may use standalone keras or tf.keras):
import os
import tensorflow as tf
from keras import backend as K
from tensorflow.python.saved_model import signature_constants, tag_constants
from tensorflow.python.saved_model.builder import SavedModelBuilder


def export_model(keras_model, export_path, model_version=0, weights_path=None):
    """Export a model for use with tensorflow-serving.

    Args:
        keras_model: instantiated Keras model to export
        export_path: destination to save the exported model files
        model_version: integer version of the model
        weights_path: path to a .h5 or .tf weights file for the model to load
    """
    # Start the tensorflow session
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.8, allow_growth=False)
    sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))

    K.set_session(sess)
    K._LEARNING_PHASE = tf.constant(0)
    K.set_learning_phase(0)

    # Create export path if it doesn't exist
    export_path = os.path.join(export_path, str(model_version))
    builder = SavedModelBuilder(export_path)
    # legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')

    # Initialize global variables and the model
    init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
    sess.run(init_op)

    # Load the model and the weights
    if weights_path is not None:
        keras_model.load_weights(weights_path)

    if isinstance(keras_model.output, list):
        output = keras_model.output[-1]
    else:
        output = keras_model.output

    # Define prediction signature
    if isinstance(keras_model.input, list):
        input_map = {"input{}".format(i): input_tensor
                     for i, input_tensor in enumerate(keras_model.input)}
        output_map = {"prediction": output}
    else:
        input_map = {"input": keras_model.input}
        output_map = {"prediction": output}

    prediction_signature = tf.saved_model.signature_def_utils.predict_signature_def(
        input_map,
        output_map
    )

    # Add the meta_graph and the variables to the builder
    builder.add_meta_graph_and_variables(
        sess, [tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                prediction_signature
        })

    # Save the graph
    builder.save()
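
A minimal usage sketch for export_model, assuming a hypothetical toy model and
export directory; TF Serving expects the numeric version subdirectory that the
function creates:

from keras.layers import Dense
from keras.models import Sequential

model = Sequential([Dense(10, activation='softmax', input_shape=(784,))])
export_model(model, '/tmp/served_models/mnist', model_version=1)
# writes /tmp/served_models/mnist/1/saved_model.pb plus a variables/ directory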
Example #3
# Assumed imports: the same set as Example #2 above.
def export_model(keras_model, export_path, model_version=0, weights_path=None):
    # Start the tensorflow session
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.8,
                                allow_growth=False)
    sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))

    K.set_session(sess)
    K._LEARNING_PHASE = tf.constant(0)
    K.set_learning_phase(0)

    # Create export path if it doesn't exist
    export_path = os.path.join(export_path, str(model_version))
    builder = SavedModelBuilder(export_path)
    # legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')

    # Initialize global variables and the model
    init_op = tf.group(tf.global_variables_initializer(),
                       tf.local_variables_initializer())
    sess.run(init_op)

    # Load the model and the weights
    if weights_path is not None:
        keras_model.load_weights(weights_path)

    if isinstance(keras_model.output, list):
        output = keras_model.output[-1]
    else:
        output = keras_model.output

    # Define prediction signature
    prediction_signature = tf.saved_model.signature_def_utils.predict_signature_def(
        {'image': keras_model.input}, {'prediction': output})

    # Add the meta_graph and the variables to the builder
    builder.add_meta_graph_and_variables(
        sess, [tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
            prediction_signature
        })

    # Save the graph
    builder.save()
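
Since these exports target tensorflow-serving, a hedged client sketch using the
TF Serving REST API (assumes a serving instance hosting the model under the
hypothetical name 'mnist' on localhost:8501, and that image_array holds one
input sample as a numpy array; the 'image' key matches the signature above):

import json
import requests

payload = {'instances': [{'image': image_array.tolist()}]}
response = requests.post('http://localhost:8501/v1/models/mnist:predict',
                         data=json.dumps(payload))
prediction = response.json()['predictions'][0]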
Example #4
    # Assumed module-level imports for this snippet: logging, keras.backend as
    # K, keras.models.load_model, and SavedModelBuilder, predict_signature_def
    # and tag_constants from tensorflow.python.saved_model.
    def to_savedmodel(self):
        """
        Converts the model from .h5 (Keras) to .pb (TensorFlow) and saves
        both to Storage.

        Returns:
            - res (string): path where the converted model is saved
        """
        model = load_model(self.fname)
        path = self.model_dest+"/"+self.version_name

        try:
            builder = SavedModelBuilder(path)
            signature = predict_signature_def(
                inputs={"inputs": model.input},
                outputs={"outputs": model.output})
            with K.get_session() as sess:
                builder.add_meta_graph_and_variables(
                    sess=sess,
                    tags=[tag_constants.SERVING],
                    signature_def_map={
                        'predict': signature})
                builder.save()
            res = "Modelo guardado en formato .pb en {}".format(path)
            logging.info(res)
        except AssertionError as exception:
            res = exception
            logging.error(exception)

        try:
            logging.info(f"Subiendo modelo en H5 a Storage: {path}")
            self.upload_blob()
        except AssertionError as exception:
            pass
            logging.error(f"Error al tratar de subir el modelo H5:{exception}')

        return res
Example #5
import tensorflow as tf
from tensorflow.python.saved_model.builder import SavedModelBuilder
from tensorflow.python.saved_model.signature_constants import REGRESS_METHOD_NAME
from tensorflow.python.saved_model.signature_def_utils import build_signature_def
from tensorflow.python.saved_model.tag_constants import SERVING, TRAINING
from tensorflow.python.saved_model.utils import build_tensor_info

x = tf.placeholder(tf.float32, name='x')
y = tf.placeholder(tf.float32, name='y')

w = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='w')
b = tf.Variable(tf.zeros([1]), name='b')
y_hat = tf.add(w * x, b, name="y_hat")

loss = tf.reduce_mean(tf.square(y_hat - y))
optimizer = tf.train.GradientDescentOptimizer(0.5)
train = optimizer.minimize(loss, name='train')

init = tf.variables_initializer(tf.global_variables(), name='init')

directory = 'examples/saved-regression-model'
builder = SavedModelBuilder(directory)

with tf.Session(graph=tf.get_default_graph()) as sess:
    sess.run(init)

    signature_inputs = {"x": build_tensor_info(x), "y": build_tensor_info(y)}
    signature_outputs = {"out": build_tensor_info(y_hat)}
    signature_def = build_signature_def(signature_inputs, signature_outputs,
                                        REGRESS_METHOD_NAME)
    builder.add_meta_graph_and_variables(
        sess, [TRAINING, SERVING],
        signature_def_map={REGRESS_METHOD_NAME: signature_def},
        assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS))
    builder.save(as_text=False)
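
A short sketch of reloading the regression model saved above and running its
signature in a fresh session (assumes it runs after the export completes; both
tags must be passed back because the meta graph was saved with both):

with tf.Session(graph=tf.Graph()) as sess:
    meta_graph_def = tf.saved_model.loader.load(sess, [TRAINING, SERVING],
                                                directory)
    signature = meta_graph_def.signature_def[REGRESS_METHOD_NAME]
    x_name = signature.inputs['x'].name
    out_name = signature.outputs['out'].name
    # evaluate y_hat = w * x + b for a batch of x values
    print(sess.run(out_name, feed_dict={x_name: [1.0, 2.0, 3.0]}))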
Example #6
# The snippet starts mid-script: pred, x, y, keep_prob, learning_rate,
# batch_size, training_iters, dropout, mnist and EXPORT_DIR are defined
# earlier in the original file; the truncated cost expression is
# reconstructed here.
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred,
                                                              labels=y,
                                                              name="softmax"),
                      name="cost")
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(
    cost, name="train")

# Evaluate model
correct_pred = tf.equal(tf.argmax(pred, 1, name="pred"),
                        tf.argmax(y, 1),
                        name="correct_pred")
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32), name="accuracy")

# Initializing the variables
init = tf.global_variables_initializer()

builder = SavedModelBuilder(EXPORT_DIR)

# Launch the graph
with tf.Session(graph=tf.get_default_graph()) as sess:
    sess.run(init)
    step = 1
    # Keep training until reach max iterations
    while step * batch_size < training_iters:
        batch_x, batch_y = mnist.train.next_batch(batch_size)
        # Run optimization op (backprop)
        sess.run(optimizer,
                 feed_dict={
                     x: batch_x,
                     y: batch_y,
                     keep_prob: dropout
                 })
        step += 1

    # The original snippet is truncated here; a minimal completion that saves
    # the trained graph with the builder created above:
    builder.add_meta_graph_and_variables(sess,
                                         [tf.saved_model.tag_constants.SERVING])
    builder.save()
Example #7
# Assumed imports; export_base, model_name and export_path are defined earlier
# in the original script.
import os
import tensorflow as tf
from keras import backend as K
from keras.models import load_model
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.saved_model.builder import SavedModelBuilder
from tensorflow.python.saved_model.signature_def_utils import predict_signature_def

# initialise and open tf session
with tf.Session() as sess:

    # set keras backend to use this session
    K.set_session(sess)

    # load the saved model
    model_path = os.path.join(export_base, model_name + ".h5")
    model = load_model(model_path)

    # load global vars into the tf session
    sess.run(tf.global_variables_initializer())

    # for testing set to 0, for training model set to 1
    K.set_learning_phase(0)

    # builder for saving the model at the export path
    builder = SavedModelBuilder(export_path)

    # generate model signature map to use the model in the tf serving
    signature = predict_signature_def(inputs={"inputs": model.input},
                                      outputs={"outputs": model.output})

    # tag the model
    builder.add_meta_graph_and_variables(
        sess=sess,
        tags=[tag_constants.SERVING],
        signature_def_map={'predict': signature})

    # save builder instance and get the .pb file for deployment
    builder.save()
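
Note that Examples #4 and #7 register the signature under the custom key
'predict' rather than DEFAULT_SERVING_SIGNATURE_DEF_KEY, so a TF Serving
client has to name it explicitly, e.g. by adding "signature_name": "predict"
to the REST request body shown after Example #3.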
Example #8
    # Assumed module-level imports for this snippet: json, shutil, numpy as np,
    # tensorflow as tf, os.path.join as join, errno.ENOENT, SavedModelBuilder,
    # and the project-level CallResponseModel.
    def inference_model(model_path, hparams):
        inference_model = CallResponseModel(hparams)

        with open(join(model_path, 'variables.json'), mode='r') as f:
            core_variables = json.load(f)

        if hparams['float_type'] == 'float64':
            tf_type = tf.float64
            np_type = np.float64
        elif hparams['float_type'] == 'float32':
            tf_type = tf.float32
            np_type = np.float32
        else:
            raise ValueError("unsupported float_type: {}".format(hparams['float_type']))

        # load all checkpointed variables to convert them to float_type
        tf.reset_default_graph()
        with tf.Session() as sess:
            variables = {}
            variables_to_load = []
            for name, shape in core_variables.items():
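                # names from variables.json end with ':0'; strip the suffix to
                # get the variable name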
                ref = tf.Variable(np.zeros(shape, dtype=np_type), name=name[:-2])
                variables[name] = ref
                variables_to_load.append(ref)

            saver = tf.train.Saver(variables_to_load)
            cp = tf.train.latest_checkpoint(join(model_path, 'regression'))
            saver.restore(sess, cp)

            variable_values = {}
            for name, var in variables.items():
                variable_values[name] = var.eval().astype(np_type)
        tf.reset_default_graph()

        with tf.Session() as sess:
            call = tf.placeholder(
                dtype=tf_type,
                shape=(None, None, inference_model.encoder.encoding_channels),
                name='call')
            inference_model.core_graph(call, None)

            sess.run(tf.global_variables_initializer())
            for var in tf.global_variables():
                var.load(variable_values[var.name], session=sess)

            builder_path = join(model_path, 'inference_builder')

            try:
                shutil.rmtree(builder_path)
            except OSError as e:
                if e.errno == ENOENT:
                    pass
                else:
                    raise
            builder = SavedModelBuilder(builder_path)
            builder.add_meta_graph_and_variables(sess, [])
            builder.save()

        with open(join(model_path, 'inference_builder', 'encoder.json'), mode='w') as f:
            json.dump(hparams['encoder_dict'], f)
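
Because this builder call passes an empty tag list and no signature map, the
resulting SavedModel can only be reloaded by passing back the same empty tag
set and fetching tensors by name. A hedged sketch (the output tensor name is
hypothetical; 'call:0' is the placeholder defined above):

with tf.Session(graph=tf.Graph()) as sess:
    tf.saved_model.loader.load(sess, [], join(model_path, 'inference_builder'))
    # output = sess.run('<output_tensor>:0', feed_dict={'call:0': batch})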