Code Example #1
def _save_oncloud(model, export_path):
    tmpPath = './tmp_folder'
    # Allow overwriting: if the temporary path already exists, remove it first.
    if file_io.file_exists(tmpPath):
        # print("Need to overwrite preexisting path. Recursively deleting... ", tmpPath)
        file_io.delete_recursively(tmpPath)

    builder = saved_model_builder.SavedModelBuilder(tmpPath)

    signature = predict_signature_def(inputs={'input': model.inputs[0]},
                                      outputs={'income': model.outputs[0]})

    with K.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                signature
            })

    # See: http://liufuyang.github.io/2017/04/02/just-another-tensorflow-beginner-guide-4.html
    # A similar workaround is used in task.py
    modelSavePath = builder.save()

    # Save model on to google storage
    with file_io.FileIO(modelSavePath, mode='rb') as input_f:
        with file_io.FileIO(os.path.join(export_path, basename(modelSavePath)),
                            mode='w+') as output_f:
            output_f.write(input_f.read())
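Note: the snippets in this collection rely on TF 1.x-style imports that the original files declare at module level. A minimal preamble sketch (module paths match the examples below; adjust for your TensorFlow/Keras version):

import os
from os.path import basename

from keras import backend as K
from tensorflow.python.lib.io import file_io
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import tag_constants, signature_constants
from tensorflow.python.saved_model.signature_def_utils_impl import predict_signature_def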
Code Example #2
  def testPredictionSignatureDef(self):
    input1 = constant_op.constant("a", name="input-1")
    input2 = constant_op.constant("b", name="input-2")
    output1 = constant_op.constant("c", name="output-1")
    output2 = constant_op.constant("d", name="output-2")
    signature_def = signature_def_utils_impl.predict_signature_def({
        "input-1": input1,
        "input-2": input2
    }, {"output-1": output1,
        "output-2": output2})

    self.assertEqual(signature_constants.PREDICT_METHOD_NAME,
                     signature_def.method_name)

    # Check inputs in signature def.
    self.assertEqual(2, len(signature_def.inputs))
    input1_tensor_info_actual = (signature_def.inputs["input-1"])
    self.assertEqual("input-1:0", input1_tensor_info_actual.name)
    self.assertEqual(types_pb2.DT_STRING, input1_tensor_info_actual.dtype)
    self.assertEqual(0, len(input1_tensor_info_actual.tensor_shape.dim))
    input2_tensor_info_actual = (signature_def.inputs["input-2"])
    self.assertEqual("input-2:0", input2_tensor_info_actual.name)
    self.assertEqual(types_pb2.DT_STRING, input2_tensor_info_actual.dtype)
    self.assertEqual(0, len(input2_tensor_info_actual.tensor_shape.dim))

    # Check outputs in signature def.
    self.assertEqual(2, len(signature_def.outputs))
    output1_tensor_info_actual = (signature_def.outputs["output-1"])
    self.assertEqual("output-1:0", output1_tensor_info_actual.name)
    self.assertEqual(types_pb2.DT_STRING, output1_tensor_info_actual.dtype)
    self.assertEqual(0, len(output1_tensor_info_actual.tensor_shape.dim))
    output2_tensor_info_actual = (signature_def.outputs["output-2"])
    self.assertEqual("output-2:0", output2_tensor_info_actual.name)
    self.assertEqual(types_pb2.DT_STRING, output2_tensor_info_actual.dtype)
    self.assertEqual(0, len(output2_tensor_info_actual.tensor_shape.dim))
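To illustrate what the test above asserts, predict_signature_def simply wraps the given tensors in a SignatureDef whose method name is the predict constant. A minimal, self-contained sketch (TF 1.x; the tensor names here are illustrative only):

import tensorflow as tf
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model.signature_def_utils_impl import predict_signature_def

x = tf.placeholder(tf.float32, shape=[None, 3], name='x')
y = tf.identity(x, name='y')
sig = predict_signature_def(inputs={'x': x}, outputs={'y': y})
print(sig.method_name == signature_constants.PREDICT_METHOD_NAME)  # True
print(sig.inputs['x'].name)    # 'x:0'
print(sig.outputs['y'].name)   # 'y:0'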
Code Example #3
def export_tfserving_model(output_graph_temp, output_graph):
    export_path = os.path.join(output_graph)
    if os.path.exists(export_path):
        shutil.rmtree(export_path)
    builder = saved_model_builder.SavedModelBuilder(export_path)

    detection_graph = tf.Graph()
    with detection_graph.as_default():
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(output_graph_temp, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)
            tf.import_graph_def(od_graph_def, name='')

    with tf.Session(graph=detection_graph) as sess:
        signature = predict_signature_def(inputs={'im_data': sess.graph.get_tensor_by_name('Placeholder:0'),
                                                  'im_info': sess.graph.get_tensor_by_name('Placeholder_1:0')},
                                          outputs={'score': sess.graph.get_tensor_by_name(
                                              'resnet_v1_101_5/cls_score/BiasAdd:0'),
                                              'prob': sess.graph.get_tensor_by_name('resnet_v1_101_5/cls_prob:0'),
                                              'pred': sess.graph.get_tensor_by_name(
                                                  'resnet_v1_101_5/bbox_pred/BiasAdd:0'),
                                              'rois': sess.graph.get_tensor_by_name(
                                                  'resnet_v1_101_3/rois/concat:0')})
        builder.add_meta_graph_and_variables(sess=sess,
                                             tags=[tag_constants.SERVING],
                                             signature_def_map={'predict': signature})
        builder.save()
    # sess.close()
    tf.reset_default_graph()

    if os.path.exists(output_graph_temp):
        os.remove(output_graph_temp)
Code Example #4
def export_to_tf_model(model_path, export_folder):
    # reset session
    K.clear_session()
    sess = tf.Session()
    K.set_session(sess)

    # disable loading of learning nodes
    K.set_learning_phase(0)

    # load model
    model = load_model(model_path)
    config = model.get_config()
    weights = model.get_weights()

    # export saved model
    builder = saved_model_builder.SavedModelBuilder(export_folder)

    signature = predict_signature_def(inputs={'input': model.input},
                                      outputs={'output': model.output})

    with K.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                signature
            })
        builder.save()
Code Example #5
 def export(self, model, version, path):
     import tensorflow as tf
     K.set_learning_phase(0)
     from tensorflow.python.saved_model import builder
     from tensorflow.python.saved_model import tag_constants, signature_constants
     from tensorflow.python.saved_model import signature_def_utils_impl
     from tensorflow.python.saved_model.utils import build_tensor_info
     # create new model: make sure learning_phase=test, not train
     config = model.get_config()
     weights = model.get_weights()
     new_model = Model.from_config(config)
     new_model.set_weights(weights)
     # create model exporter
     export_path = os.path.join(tf.compat.as_bytes(path),
                                tf.compat.as_bytes(str(version)))
     builder = builder.SavedModelBuilder(export_path)
     # create signature
     model_input = build_tensor_info(new_model.input)
     model_output = build_tensor_info(new_model.output)
     predict_signature = signature_def_utils_impl.predict_signature_def(
         inputs={'inputs': model_input}, outputs={'outputs': model_output})
     with K.get_session(
     ) as sess:  # sess is the Tensorflow session that holds your trained model
         builder.add_meta_graph_and_variables(
             sess=sess,
             tags=[tag_constants.SERVING],
             signature_def_map={
                 'predict_images':
                 predict_signature,
                 signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                 predict_signature  # no separate classification signature in this exporter
             },
             legacy_init_op=tf.tables_initializer())
     builder.save()
     return
Code Example #6
def convert_pb_saved_model(model_path, out_put_path):
    with tf.gfile.FastGFile(model_path, "rb") as f:
        graph = tf.get_default_graph()
        graph_def = graph.as_graph_def()
        graph_def.ParseFromString(f.read())
        tf.import_graph_def(graph_def, name='graph')
        summaryWriter = tf.summary.FileWriter('log/', graph)

        result, x = tf.import_graph_def(
            graph_def, return_elements=["fc_out_fc3:0", "input_image:0"])
        #        graph_def = tf.GraphDef()
        #        graph_def.ParseFromString(f.read())

        #        x =tf.get_default_graph().get_tensor_by_name("input_image:0")
        #        result= tf.get_default_graph().get_tensor_by_name("fc_out_fc3:0")

        with tf.Session() as sess:
            init = tf.global_variables_initializer()
            sess.run(init)
            builder = tf.saved_model.builder.SavedModelBuilder(out_put_path)
            signature = predict_signature_def(inputs={'myInput': x},
                                              outputs={'myOutput': result})
            builder.add_meta_graph_and_variables(
                sess=sess,
                tags=[tf.saved_model.tag_constants.SERVING],
                signature_def_map={'predict': signature})
            builder.save()
Code Example #7
def run_demo(records1, records2):

    op_sum = tf.add(x1, x2)
    op_mul = tf.multiply(op_sum, kWeight)
    with tf.device('/device:CPU:0'):
        y = tf.add(op_mul, kBias, name=kOutputNodeName)

    with tf.Session() as session:
        tf.global_variables_initializer().run()

        result = session.run(y, feed_dict={x1: records1, x2: records2})
        print(result)

        frozen_model_file = "frozen_add_model.pb"
        print("[INFO]frozen the model to ", frozen_model_file)
        model_util.freeze_graph(session, kOutputNodeName, frozen_model_file)

        # SavedModel
        export_path = "./saved_simple_add/00001"
        builder = tf.saved_model.builder.SavedModelBuilder(export_path)
        signature = predict_signature_def(inputs={
            kInputNodeName1: x1,
            kInputNodeName2: x2,
        },
                                          outputs={kOutputNodeName: y})
        builder.add_meta_graph_and_variables(
            sess=session,
            tags=[tf.saved_model.tag_constants.SERVING],
            signature_def_map={
                tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                signature
            })

        builder.save()
        print("[INFO] savedmodel to ", export_path)
Code Example #8
def to_savedmodel(keras_model, export_path):
    """
    Converts Keras model into a tensorflow saved_model format.
    :param keras_model: Loaded keras model
    :param export_path: local directory where you want to save your tensorflow SavedModel format model.
    :return: None. Creates directory and saved_model.pb file
    """
    try:
        builder = saved_model_builder.SavedModelBuilder(export_path)
        signature = predict_signature_def(
            inputs={'input': keras_model.inputs[0]},
            outputs={'output': keras_model.outputs[0]})

        with K.get_session() as sess:
            builder.add_meta_graph_and_variables(
                sess=sess,
                tags=[tag_constants.SERVING],
                signature_def_map={
                    signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                    signature
                })
        builder.save()
    except Exception:
        error_client = error_reporting.Client()
        error_client.report_exception()
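After an export like the one above it can be useful to load the SavedModel back and inspect the registered signature. A hedged sanity-check sketch (TF 1.x loader API; export_path is the directory written by builder.save()):

import tensorflow as tf

with tf.Session(graph=tf.Graph()) as sess:
    meta_graph = tf.saved_model.loader.load(
        sess, [tf.saved_model.tag_constants.SERVING], export_path)
    sig = meta_graph.signature_def[
        tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
    print(sig.inputs['input'].name, sig.outputs['output'].name)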
Code Example #9
def export_for_serving(encoding_dir, checkpoint_dir, export_dir, train_config, seed=0):
    hparams = model.default_hparams()
    with open(os.path.join(encoding_dir, 'hparams.json')) as f:
        hparams.override_from_dict(json.load(f))
    length = hparams.n_ctx // 2

    with tf.Session(graph=tf.Graph()) as sess:
        context = tf.placeholder(tf.int32, [train_config["batch_size"], None])
        np.random.seed(seed)
        tf.set_random_seed(seed)

        output = sample.sample_sequence(
            hparams=hparams,
            length=length,
            context=context,
            batch_size=train_config["batch_size"],
            temperature=1.0,
            top_k=train_config["top_k"]
        )

        saver = tf.train.Saver()
        ckpt = tf.train.latest_checkpoint(checkpoint_dir)
        saver.restore(sess, ckpt)

        builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
        signature = predict_signature_def(inputs={'context': context},
                                          outputs={'sample': output})
        builder.add_meta_graph_and_variables(sess,
                                             [tf.saved_model.SERVING],
                                             signature_def_map={"predict": signature},
                                             strip_default_attrs=True)
        builder.save()
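Once a model exported this way is running under TensorFlow Serving, the signature registered under "predict" can be called through the REST API. A hypothetical client call (the model name gpt2, the port 8501, and the token IDs are assumptions, not part of the example above):

import requests

payload = {
    "signature_name": "predict",
    "instances": [{"context": [318, 428, 257]}],  # placeholder token IDs
}
resp = requests.post("http://localhost:8501/v1/models/gpt2:predict", json=payload)
print(resp.json())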
Code Example #10
def to_savedmodel(model, export_path):
    """Convert the Keras HDF5 model into TensorFlow SavedModel."""
    model = model.keras_model
    builder = saved_model_builder.SavedModelBuilder(export_path)
    signature = predict_signature_def(inputs={
        'input_image:0': model.inputs[0],
        'input_image_meta:0': model.inputs[1],
        'input_anchors:0': model.inputs[2]
    },
                                      outputs={
                                          'mrcnn_detection/Reshape_1:0':
                                          model.outputs[0],
                                          'mrcnn_class/Reshape_1:0':
                                          model.outputs[1],
                                          'mrcnn_bbox/Reshape:0':
                                          model.outputs[2],
                                          'mrcnn_mask/Reshape_1:0':
                                          model.outputs[3],
                                          'ROI/packed_2:0':
                                          model.outputs[4],
                                          'rpn_class/concat:0':
                                          model.outputs[5],
                                          'rpn_bbox/concat:0':
                                          model.outputs[6]
                                      })

    with K.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                signature
            })
        builder.save()
Code Example #11
def main(base_model_name, weights_file, export_path):
    # Load model and weights
    nima = Nima(base_model_name, weights=None)
    nima.build()
    nima.nima_model.load_weights(weights_file)

    # Tell keras that this will be used for making predictions
    K.set_learning_phase(0)

    # CustomObject required by MobileNet
    with CustomObjectScope({
            'relu6': relu6,
            'DepthwiseConv2D': DepthwiseConv2D
    }):
        builder = saved_model_builder.SavedModelBuilder(export_path)
        signature = predict_signature_def(
            inputs={'input_image': nima.nima_model.input},
            outputs={'quality_prediction': nima.nima_model.output})

        builder.add_meta_graph_and_variables(
            sess=K.get_session(),
            tags=[tag_constants.SERVING],
            signature_def_map={'image_quality': signature})
        builder.save()

    print(f'TF model exported to: {export_path}')
Code Example #12
    def save(self, filename, path=None):
        """
        Save a model to file in the format specific to the backend framework. For TensorFlow, .ckpt is used.

        :param filename: Name of the file where to store the model.
        :type filename: `str`
        :param path: Path of the folder where to store the model. If no path is specified, the model will be stored in
                     the default data location of the library `ART_DATA_PATH`.
        :type path: `str`
        :return: None
        """
        # pylint: disable=E0611
        import os
        import shutil
        from tensorflow.python import saved_model
        from tensorflow.python.saved_model import tag_constants
        from tensorflow.python.saved_model.signature_def_utils_impl import predict_signature_def

        if path is None:
            from art.config import ART_DATA_PATH
            full_path = os.path.join(ART_DATA_PATH, filename)
        else:
            full_path = os.path.join(path, filename)

        if os.path.exists(full_path):
            shutil.rmtree(full_path)

        builder = saved_model.builder.SavedModelBuilder(full_path)
        signature = predict_signature_def(inputs={'SavedInputPhD': self._input_ph},
                                          outputs={'SavedOutput': self._output})
        builder.add_meta_graph_and_variables(sess=self._sess, tags=[tag_constants.SERVING],
                                             signature_def_map={'predict': signature})
        builder.save()

        logger.info('Model saved in path: %s.', full_path)
Code Example #13
def main(args):
    with GFile(args.frozen_model_path, "rb") as f:
        graph_def = GraphDef()
        graph_def.ParseFromString(f.read())

    if os.path.exists(args.output_model_dir) and os.path.isdir(
            args.output_model_dir):
        shutil.rmtree(args.output_model_dir)

    with Session() as sess:
        # Then, we import the graph_def into a new Graph and returns it
        with Graph().as_default() as graph:
            import_graph_def(graph_def, name='')
            signature = predict_signature_def(
                inputs={
                    'image_batch': graph.get_tensor_by_name('image_batch:0'),
                    'phase_train': graph.get_tensor_by_name('phase_train:0')
                },
                outputs={
                    'embeddings': graph.get_tensor_by_name('embeddings:0')
                })

            builder = saved_model_builder.SavedModelBuilder(
                args.output_model_dir)
            builder.add_meta_graph_and_variables(
                sess=sess,
                tags=[tag_constants.SERVING],
                signature_def_map={'serving_default': signature})
            builder.save()
Code Example #14
    def ExportModel(self):
        import keras.backend as K
        from tensorflow.python.saved_model import builder as saved_model_builder
        from tensorflow.python.saved_model import utils
        from tensorflow.python.saved_model import tag_constants, signature_constants
        from tensorflow.python.saved_model.signature_def_utils_impl import build_signature_def, predict_signature_def
        from tensorflow.contrib.session_bundle import exporter

        print("EXPORTING MODEL...")

        export_path = 'exported_brain'
        builder = saved_model_builder.SavedModelBuilder(export_path)

        signature = predict_signature_def(
            inputs={'inputs': self.brain.keras.input},
            outputs={'outputs': self.brain.keras.output})

        with K.get_session() as sess:
            builder.add_meta_graph_and_variables(
                sess=sess,
                tags=[tag_constants.TRAINING],
                signature_def_map={'predict': signature})
            builder.save()

        print("...done!")
Code Example #15
    def save_model(self, model, input='array'):
        K.set_learning_phase(0)

        input_caption = model.input[1]
        input_image = model.input[0]
        output_prediction = model.output

        if input == 'bytes':
            input_image = tf.placeholder(tf.string, shape=(None,), name='input_string')
            input_bytes_map = tf.map_fn(tf.decode_base64, input_image)
            input_bytes_map.set_shape((None,))
            input_tensor_map = tf.map_fn(tf.image.decode_image, input_bytes_map, dtype=tf.float32)
            input_tensor_map.set_shape((None, None, None, 3))
            input_tensor = tf.image.convert_image_dtype(input_tensor_map, dtype=tf.float32)
            output_prediction = model([input_tensor, input_caption])

        export_path = 'models/serving_model/model-data/{timestamp}'.format(timestamp=int(time.time()))
        builder = saved_model_builder.SavedModelBuilder(export_path)

        signature = predict_signature_def(inputs={'image': input_image,
                                                  'sequence_input': input_caption},
                                          outputs={'sequence_output': output_prediction})

        with K.get_session() as sess:
            builder.add_meta_graph_and_variables(sess=sess,
                                                 tags=[tag_constants.SERVING],
                                                 signature_def_map={'predict': signature})
            builder.save()
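For the 'bytes' input path above, tf.decode_base64 expects web-safe base64, so a client has to encode images with the URL-safe alphabet. A small illustrative sketch (the file name is a placeholder):

import base64

with open('image.jpg', 'rb') as f:
    encoded = base64.urlsafe_b64encode(f.read()).decode('utf-8')
# `encoded` is the value to feed into the 'image' input of the signature above.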
Code Example #16
def save_as_tensorflow(model: Model, export_path: str, arg_max: tf.Tensor):
    """
    Convert the Keras HDF5 model into TensorFlow SavedModel
    export_path: either local path or Google Cloud Storage's bucket path
                 (ex. "checkpoints", "gs://anderson-mnist/mnist_train_20180530_145010")
    """

    builder = tf_model_builder.SavedModelBuilder(export_path)

    signature = predict_signature_def(inputs={
        'image': model.inputs[0],
        'image_bytes': model.inputs[0]
    },
                                      outputs={
                                          'probabilities': model.outputs[0],
                                          'class': arg_max
                                      })
    sess = K.get_session()
    builder.add_meta_graph_and_variables(
        sess=sess,
        tags=[tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
        })
    builder.save()
Code Example #17
def export_keras_model(m, export_path):
    builder = saved_model.builder.SavedModelBuilder(export_path)
    signature = predict_signature_def(inputs={'input': m.inputs[0]},
                                      outputs={'sentiment': m.outputs[0]})

    with tf.keras.backend.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[SERVING],
            signature_def_map={DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature})
        builder.save()
Code Example #18
    def save_final_model(self):
        builder = saved_model_builder.SavedModelBuilder("../model/")
        signature = predict_signature_def(
            inputs={'images': self.merged_model.input},
            outputs={'scores': self.merged_model.output})

        with K.get_session() as sess:
            builder.add_meta_graph_and_variables(
                sess=sess,
                tags=[tag_constants.SERVING],
                signature_def_map={'predict': signature})
            builder.save()
Code Example #19
def main(argv):
    del argv
    mtype = FLAGS.mtype

    x = tf.placeholder(tf.float32, [None, 784], name="input")
    W = tf.Variable(tf.zeros([784, 10]))
    b = tf.Variable(tf.zeros([10]))

    y = tf.nn.softmax(tf.matmul(x, W) + b)
    y_ = tf.placeholder(tf.float32, [None, 10])
    cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), 1))

    tf.identity(y, name="result")

    train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
    tf.global_variables_initializer().run()
    saver = tf.train.Saver()
    constant_graph = graph_util.convert_variables_to_constants(
        sess, sess.graph_def, ['result'])

    for i in range(20):
        batch_xs, batch_ys = mnist.train.next_batch(32)
        train_step.run({x: batch_xs, y_: batch_ys})

    correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

    print(accuracy.eval({x: mnist.test.images, y_: mnist.test.labels}))

    if "saved_model" in mtype:
        save_file = "./model/saved_model"
        if os.path.exists(save_file):
            import shutil
            shutil.rmtree(save_file)

        builder = tf.compat.v1.saved_model.builder.SavedModelBuilder(
            "./model/saved_model")
        signature = predict_signature_def(inputs={'input': x},
                                          outputs={'result': y})
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={'predict': signature})
        builder.save()
    elif "ckpt" in mtype:
        ckpt_file = "./model/ckpt/"
        if not os.path.exists(ckpt_file):
            os.mkdir(ckpt_file)
        saver.save(sess, ckpt_file + "model.ckpt")
    elif "pb" in mtype:
        with tf.gfile.FastGFile('./model/model.pb', mode='wb') as f:
            f.write(constant_graph.SerializeToString())
Code Example #20
 def export(self, filepath):
     shutil.rmtree(filepath, ignore_errors=True)  # tolerate a missing export directory
     builder = saved_model.builder.SavedModelBuilder(filepath)
     signature = predict_signature_def(
         inputs={'input': self.model.input},
         outputs={'output': self.model.output})
     # using custom tag instead of: tags=[tag_constants.SERVING]
     builder.add_meta_graph_and_variables(
         sess=get_session(),
         tags=["tag"],
         signature_def_map={'predict': signature})
     builder.save(as_text=True)
     builder.save(as_text=False)
Code Example #21
def session_to_savedmodel(session, inputs, outputs, export_path):
    """Convert the Keras HDF5 model into TensorFlow SavedModel."""
    builder = saved_model_builder.SavedModelBuilder(export_path)

    signature = predict_signature_def(inputs={'inputs': inputs},
                                      outputs={'outputs': outputs})

    builder.add_meta_graph_and_variables(
        sess=session,
        tags=[tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
        })
    builder.save()
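A hypothetical call to the helper above for a single-input, single-output Keras model (the model variable and the export directory are placeholders):

from keras import backend as K

session_to_savedmodel(K.get_session(),
                      model.inputs[0],
                      model.outputs[0],
                      './export/1')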
Code Example #22
def convert_model_to_tf_serving(inputs, outputs, signature_name, output_dir, version):
    export_path = os.path.join(output_dir, str(version))
    print('export_path = {}\n'.format(export_path))
    # SavedModelBuilder creates export_path itself and fails if it already exists,
    # so remove any stale export instead of pre-creating the directory.
    if os.path.exists(export_path):
        import shutil
        shutil.rmtree(export_path)

    builder = saved_model_builder.SavedModelBuilder(export_path)
    signature = predict_signature_def(inputs=inputs, outputs=outputs)

    sess = K.get_session()
    builder.add_meta_graph_and_variables(sess=sess,
                                        tags=[tag_constants.SERVING],
                                        signature_def_map={signature_name: signature})
    builder.save()
Code Example #23
def export_for_serving(
    model_name='124M',
    seed=None,
    batch_size=1,
    length=None,
    temperature=1,
    top_k=0,
    models_dir='models'
):
    models_dir = 'models'# os.path.expanduser(os.path.expandvars(models_dir))

    hparams = model.default_hparams()
    with open(os.path.join(models_dir, model_name, 'hparams.json')) as f:
        hparams.override_from_dict(json.load(f))

    if length is None:
        length = hparams.n_ctx
    elif length > hparams.n_ctx:
        raise ValueError("Can't get samples longer than window size: %s" % hparams.n_ctx)

    with tf.Session(graph=tf.Graph()) as sess:
        context = tf.placeholder(tf.int32, [batch_size, None])
        np.random.seed(seed)
        tf.set_random_seed(seed)

        output = sample.sample_sequence(
            hparams=hparams, length=length,
            context=context,
            batch_size=batch_size,
            temperature=temperature, top_k=top_k
        )

        saver = tf.train.Saver()
        ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name))
        saver.restore(sess, ckpt)

        export_dir=os.path.join(models_dir, model_name, "export", str(time.time()).split('.')[0])
        if not os.path.isdir(export_dir):
            os.makedirs(export_dir)

        builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
        signature = predict_signature_def(inputs={'context': context},
                                          outputs={'sample': output})

        builder.add_meta_graph_and_variables(sess,
                                             [tf.saved_model.SERVING],
                                             signature_def_map={"predict": signature},
                                             strip_default_attrs=True)
        builder.save()
Code Example #24
def export(model, args):
    from tensorflow.python.saved_model import builder as saved_model_builder
    from tensorflow.python.saved_model import tag_constants
    from tensorflow.python.saved_model.signature_def_utils_impl import predict_signature_def

    builder = saved_model_builder.SavedModelBuilder(args.export_path + '/' +
                                                    str(args.export_version))
    signature = predict_signature_def(inputs={'inputs': model.input},
                                      outputs={'outputs': model.output})
    with K.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={'predict': signature})
        builder.save()
Code Example #25
def to_savedmodel(model, export_path):
    """Convert the Keras HDF5 model into TensorFlow SavedModel."""

    builder = saved_model_builder.SavedModelBuilder(export_path)
    signature = predict_signature_def(inputs={'MBps': model.inputs[0]},
                                      outputs={'Category': model.outputs[0]})
    with K.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                signature
            })
        builder.save()
Code Example #26
File: demo.py Project: wangshoujunnew/dataMining
def saveModelDemo():
    """
    使用saveModel的格式保存模型
    :return:
    """
    with tf.Session(graph=g1) as sess:
        sess.run(tf.global_variables_initializer())
        builder = tf.saved_model.builder.SavedModelBuilder("./saveModel")
        signature = predict_signature_def(inputs={'myInput': myInput},
                                          outputs={'myOutput': myOutput})
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={'predict': signature})
        builder.save()
Code Example #27
def _main_(args):

    config_path = args.conf
    weights_path = args.weights
    image_path = args.input

    with open(config_path) as config_buffer:
        config = json.load(config_buffer)

    K.set_learning_phase(0)

    ###############################
    #   Make the model
    ###############################

    yolo = YOLO(backend=config['model']['backend'],
                input_size=config['model']['input_size'],
                labels=config['model']['labels'],
                max_box_per_image=config['model']['max_box_per_image'],
                anchors=config['model']['anchors'])

    ###############################
    #   Load trained weights
    ###############################

    print(weights_path)
    yolo.load_weights(weights_path)

    export_base = 'tfexport'
    export_version = 1
    export_path = os.path.join(tf.compat.as_bytes(export_base),
                               tf.compat.as_bytes(str(export_version)))

    builder = saved_model_builder.SavedModelBuilder(export_path)
    print(yolo.model.inputs[0])
    signature = predict_signature_def(
        inputs={
            "input_image": yolo.model.inputs[0],
            "true_boxes": yolo.model.inputs[1]
        },
        outputs={"outputs": yolo.model.outputs[0]})

    with K.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={'predict': signature})
        builder.save()
Code Example #28
File: model.py Project: zhang01GA/cloudml-samples
def to_savedmodel(model, export_path):
  """Convert the Keras HDF5 model into TensorFlow SavedModel."""

  builder = saved_model_builder.SavedModelBuilder(export_path)

  signature = predict_signature_def(
      inputs={'input': model.inputs[0]}, outputs={'income': model.outputs[0]})

  with K.get_session() as sess:
    builder.add_meta_graph_and_variables(
        sess=sess,
        tags=[tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
        })
    builder.save()
Code Example #29
def save_as_tensorflow(model: Model, export_path: str):
    builder = tf_model_builder.SavedModelBuilder(export_path)
    signature = predict_signature_def(inputs={'image': model.inputs[0]},
                                      outputs={
                                          'boxes': model.outputs[0],
                                          'scores': model.outputs[1],
                                          'labels': model.outputs[2]
                                      })
    sess = K.get_session()
    builder.add_meta_graph_and_variables(
        sess=sess,
        tags=[tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
        })
    builder.save()
Code Example #30
def save_tensorflow_model(model, export_path):
    if file_io.file_exists(export_path):
        return
    builder = saved_model_builder.SavedModelBuilder(export_path)
    signature = predict_signature_def(inputs={'input': model.inputs[0]},
                                      outputs={'output': model.outputs[0]})

    with K.get_session() as sess:
        builder.add_meta_graph_and_variables(
            sess=sess,
            tags=[tag_constants.SERVING],
            signature_def_map={
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                signature
            })
        builder.save()
Code Example #31
    def _store_tf(self, session) -> None:

        json_model_file = open(self.keras_json, "r").read()
        loaded_model = model_from_json(json_model_file)
        loaded_model.load_weights(self.keras_h5)

        builder = saved_model_builder.SavedModelBuilder(self.tf_path)
        signature = predict_signature_def(inputs={'x': loaded_model.input},
                                          outputs={'y': loaded_model.output})

        builder.add_meta_graph_and_variables(
            sess=session,
            tags=[tag_constants.SERVING],
            signature_def_map={'helpers': signature})
        builder.save()
        p.print_success(
            f'Successfully stored TensorFlow model: {self.model_name}')
Code Example #32
print ('output is:', net_model.output.name)

sess = K.get_session()

frozen_graph = freeze_session(K.get_session(), output_names=[net_model.output.op.name])

from tensorflow.python.framework import graph_io

graph_io.write_graph(frozen_graph, output_fld, output_graph_name, as_text=False)

print('saved the constant graph (ready for inference) at: ', osp.join(output_fld, output_graph_name))


# --------

from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import utils
from tensorflow.python.saved_model import tag_constants, signature_constants
from tensorflow.python.saved_model.signature_def_utils_impl import  build_signature_def, predict_signature_def
from tensorflow.contrib.session_bundle import exporter
export_path = 'folder_to_export'
builder = saved_model_builder.SavedModelBuilder(export_path)
signature = predict_signature_def(inputs={'images': net_model.input},
                                  outputs={'scores': net_model.output})

with K.get_session() as sess:
    builder.add_meta_graph_and_variables(sess=sess,
                                         tags=[tag_constants.SERVING],
                                         signature_def_map={'predict': signature})
    builder.save()
Code Example #33
File: ckpt_pb1.py Project: DXZ/git_test
graph = tf.get_default_graph()
sess = tf.Session()
MODEL_FILE = '/data/knowbox/wangth/workspace/filter/Inception_v3_classifier/model/output/1/saved_model.pb'
BOTTLENECK_TENSOR_NAME = 'pool_3/_reshape:0'  # tensor name for the bottleneck of inception-v3 model
JPEG_DATA_TENSOR_NAME = 'DecodeJpeg:0'  # image tensor
f = tf.gfile.FastGFile(MODEL_FILE, 'rb')
# # import initial pb model
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
tf.import_graph_def(graph_def, name='')
# bottleneck_tensor, jpeg_data_tensor = tf.import_graph_def(graph_def,return_elements=[BOTTLENECK_TENSOR_NAME, JPEG_DATA_TENSOR_NAME])
out_tensor = graph.get_tensor_by_name('final_result:0')
input_tensor = graph.get_tensor_by_name('DecodeJpeg:0')

# prediction_signature = predict_signature_def(inputs={'image': graph.get_operation_by_name('BottleneckInputPlaceholder').outputs[0]},
#                               outputs={'scores': graph.get_operation_by_name('evaluation/ArgMax').outputs[0]})

prediction_signature = predict_signature_def(inputs={'image':input_tensor},
                              outputs={'result':out_tensor})

# legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
builder = tf.saved_model.builder.SavedModelBuilder('./export_dir')  # placeholder export path
builder.add_meta_graph_and_variables(
    sess=sess,
    tags=[tf.saved_model.tag_constants.SERVING],
    signature_def_map={'predict':prediction_signature},
)

builder.save()
print('Export SavedModel!')
Code Example #34
File: ckpt_pb1.py Project: DXZ/git_test
def export_saved_model(version, path, sess=None):

    net = get_network("VGGnet_test")
    im = 128 * np.ones((300, 300, 3), dtype=np.uint8)

    tf.app.flags.DEFINE_integer('version', version, 'version number of the model.')
    tf.app.flags.DEFINE_string('work_dir', path, 'your older model  directory.')
    tf.app.flags.DEFINE_string('model_dir', '/tmp/model_name', 'saved model directory')
    FLAGS = tf.app.flags.FLAGS

    # You can pass in a live session to export the model right after training;
    # otherwise restore the latest checkpoint into a fresh session here.
    if not sess:
        sess = tf.Session()
        saver = tf.train.import_meta_graph(os.path.join(path, 'xxx.ckpt.meta'))
        saver.restore(sess, tf.train.latest_checkpoint(path))

    export_path = os.path.join(
        tf.compat.as_bytes(FLAGS.model_dir),
        tf.compat.as_bytes(str(FLAGS.version)))
    builder = tf.saved_model.builder.SavedModelBuilder(export_path)

    # define the signature def map here
    # ...


    os.environ["CUDA_VISIBLE_DEVICES"] = "7"
    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.2
    graph = tf.get_default_graph()
    # with tf.Graph().as_default() as graph:
    # sess = tf.Session()
    # MODEL_FILE = 'model/tensorflow_inception_graph.pb'
    # BOTTLENECK_TENSOR_NAME = 'pool_3/_reshape:0'  # tensor name for the bottleneck of inception-v3 model
    # # JPEG_DATA_TENSOR_NAME = 'DecodeJpeg/contents:0'
    # JPEG_DATA_TENSOR_NAME = 'DecodeJpeg:0'  # image tensor
    # f = tf.gfile.FastGFile(MODEL_FILE, 'rb')
    # # import initial pb model
    # graph_def = tf.GraphDef()
    # graph_def.ParseFromString(f.read())
    # bottleneck_tensor, jpeg_data_tensor = tf.import_graph_def(graph_def,return_elements=[BOTTLENECK_TENSOR_NAME, JPEG_DATA_TENSOR_NAME])

    prediction_signature = predict_signature_def(inputs={'image': graph.get_operation_by_name('BottleneckInputPlaceholder').outputs[0]},
                                  outputs={'scores': graph.get_operation_by_name('evaluation/ArgMax').outputs[0]})

    # legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
    builder.add_meta_graph_and_variables(
        sess=sess,
        tags=[tf.saved_model.tag_constants.SERVING],
        signature_def_map={
            'predict':
                prediction_signature
        },
    )

    builder.save()
    print('Export SavedModel!')