Example #1
def __init__(self, name=None, version=None, version_label=None,
             signature_name=None, **kwargs):
    super().__init__(model_pb2.ModelSpec(),
                     name=name,
                     version=version,
                     version_label=version_label,
                     signature_name=signature_name,
                     **kwargs)
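For context, a minimal sketch (not from the original source) of building the underlying ModelSpec message directly; the fields come from TensorFlow Serving's model.proto:

from tensorflow_serving.apis import model_pb2

spec = model_pb2.ModelSpec(name="my_model", signature_name="serving_default")
spec.version.value = 3  # optionally pin an exact servable version; leave unset to serve the latest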
Example #2
import numpy as np
import tensorflow as tf
from tensorflow_serving.apis import model_pb2, predict_pb2, prediction_log_pb2


def main():
    max_seq_len = 40
    input_id = [
        101, 6821, 3221, 671, 702, 3844, 6407, 4638, 1368, 2094, 102, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0
    ]
    input_mask = [
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
    ]
    segment_id = [
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
    ]
    # pad each sequence out to max_seq_len (a no-op here, since the lists are already 40 long)
    if len(input_id) < max_seq_len:
        input_id.extend([0] * (max_seq_len - len(input_id)))
        input_mask.extend([0] * (max_seq_len - len(input_mask)))
        segment_id.extend([0] * (max_seq_len - len(segment_id)))

    batch_size = 10
    input_ids = []
    input_masks = []
    segment_ids = []
    for i in range(batch_size):
        input_ids.append(input_id)
        input_masks.append(input_mask)
        segment_ids.append(segment_id)
    input_ids = np.array(input_ids)
    input_masks = np.array(input_masks)
    segment_ids = np.array(segment_ids)
    with tf.python_io.TFRecordWriter("tf_serving_warmup_requests") as writer:
        request = predict_pb2.PredictRequest(
            model_spec=model_pb2.ModelSpec(name="intent_model",
                                           signature_name='serving_default'),
            inputs={
                "input_ids":
                tf.make_tensor_proto(
                    input_ids,
                    dtype=tf.int32,
                    shape=[input_ids.shape[0], input_ids.shape[1]]),
                "input_mask":
                tf.make_tensor_proto(
                    input_mask,
                    dtype=tf.int32,
                    shape=[input_mask.shape[0], input_mask.shape[1]]),
                "segment_ids":
                tf.make_tensor_proto(
                    segment_ids,
                    dtype=tf.int32,
                    shape=[segment_ids.shape[0], segment_ids.shape[1]]),
                "training":
                tf.make_tensor_proto(False, dtype=tf.bool, shape=[])
            })
        log = prediction_log_pb2.PredictionLog(
            predict_log=prediction_log_pb2.PredictLog(request=request))
        writer.write(log.SerializeToString())
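The warmup file written above can be sanity-checked by reading it back; a hypothetical verification step using the same TF 1.x APIs:

for record in tf.python_io.tf_record_iterator("tf_serving_warmup_requests"):
    log = prediction_log_pb2.PredictionLog.FromString(record)
    print(log.predict_log.request.model_spec)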
Example #3
    def _GetModelStatus(self) -> get_model_status_pb2.GetModelStatusResponse:
        """Call GetModelStatus() from model service.

    https://github.com/tensorflow/serving/blob/master/tensorflow_serving/apis/model_service.proto

    Returns:
      GetModelStatusResponse from GetModelStatus().
    """
        request = get_model_status_pb2.GetModelStatusRequest(
            model_spec=model_pb2.ModelSpec(name=self._model_name))
        return self._model_service.GetModelStatus(request)
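The helper above assumes self._model_service is an existing ModelService stub; a minimal sketch of that setup (the address is illustrative):

import grpc
from tensorflow_serving.apis import model_service_pb2_grpc

channel = grpc.insecure_channel("localhost:8500")
model_service = model_service_pb2_grpc.ModelServiceStub(channel)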
Example #4
def main(_):
  assets_dir = make_assets_dir(tf.flags.FLAGS.export_dir)
  with tf.Session() as session:
    random_tensors = load_saved_model(session, tf.flags.FLAGS.export_dir)
    with tf.python_io.TFRecordWriter(os.path.join(assets_dir, 'tf_serving_warmup_requests')) as writer:
      for _ in range(tf.flags.FLAGS.batch_size):
        request = predict_pb2.PredictRequest(
          model_spec=model_pb2.ModelSpec(name=tf.flags.FLAGS.name),
          inputs={k: tf.make_tensor_proto(v) for k, v in session.run(random_tensors).items()}
        )
        log = prediction_log_pb2.PredictionLog(
          predict_log=prediction_log_pb2.PredictLog(request=request))
        writer.write(log.SerializeToString())
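make_assets_dir() and load_saved_model() are not shown in this example; one plausible sketch of the directory helper, assuming the TF 1.x gfile API:

def make_assets_dir(export_dir):
    # assets.extra is the folder TF Serving scans for warmup requests
    assets_dir = os.path.join(export_dir, 'assets.extra')
    if not tf.gfile.Exists(assets_dir):
        tf.gfile.MakeDirs(assets_dir)
    return assets_dir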
Example #5
def main(argv):

    count = 0
    images = []
    files = [
        path.join(FLAGS.dataset, f)
        for f in os.listdir(FLAGS.dataset)
        if path.isfile(path.join(FLAGS.dataset, f))
    ]

    files = [f for f in files if f.endswith(('.png', '.jpg', '.jpeg'))]

    for file in files:
        with open(file, 'rb') as f:
            img_raw = tf.image.decode_image(f.read(), channels=3)
        image = preprocess_image(img_raw, FLAGS.input_size)
        image = tf.expand_dims(image, 0)
        images.append(image)

        count += 1

        if count == FLAGS.size:
            break

    input_tensor = tf.concat(images, 0)

    with tf.io.TFRecordWriter('tf_serving_warmup_requests') as writer:
        request = predict_pb2.PredictRequest(
            model_spec=model_pb2.ModelSpec(
                name=FLAGS.model_name
            ),
            inputs={
                FLAGS.input_tensor: tf.make_tensor_proto(
                    input_tensor,
                    shape=input_tensor.shape,
                    dtype=input_tensor.dtype
                )
            }
        )

        log = prediction_log_pb2.PredictionLog(
            predict_log=prediction_log_pb2.PredictLog(request=request)
        )

        writer.write(log.SerializeToString())
        logging.info('"tf_serving_warmup_requests" created with success!')
        logging.info('to use it paste it to the "<model>/<version>/assets.extra" folder on the serving configuration folder')
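The FLAGS referenced above are defined elsewhere; a rough sketch of what they might look like with absl (names taken from the example, defaults illustrative):

from absl import flags

flags.DEFINE_string('dataset', './images', 'directory containing sample images')
flags.DEFINE_integer('size', 8, 'number of images in the warmup batch')
flags.DEFINE_integer('input_size', 416, 'model input resolution')
flags.DEFINE_string('model_name', 'detector', 'served model name')
flags.DEFINE_string('input_tensor', 'input', 'name of the model input tensor')
FLAGS = flags.FLAGS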
Example #6
def main():
    with tf.io.TFRecordWriter("tf_serving_warmup_requests") as writer:
        # replace <request> with one of:
        #
        # classification_pb2.ClassificationRequest(..)
        # regression_pb2.RegressionRequest(..)
        # inference_pb2.MultiInferenceRequest(..)

        request = predict_pb2.PredictRequest(
            model_spec=model_pb2.ModelSpec(name="youtube_dnn", signature_name="serving_default"),
            inputs={
                "sample": tf.compat.v1.make_tensor_proto([[1, 2, 3, 4, 5]], dtype=tf.int64),
                "label": tf.compat.v1.make_tensor_proto([[0]], dtype=tf.int64)
            }
        )
        print(request)

        log = prediction_log_pb2.PredictionLog(
            predict_log=prediction_log_pb2.PredictLog(request=request))
        # write the same warmup record 100 times
        for _ in range(100):
            writer.write(log.SerializeToString())
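The comment block above also mentions the other request types; a minimal sketch (names illustrative) of wrapping a ClassificationRequest in a warmup record instead:

from tensorflow_serving.apis import classification_pb2, input_pb2

class_request = classification_pb2.ClassificationRequest(
    model_spec=model_pb2.ModelSpec(name="youtube_dnn", signature_name="classify"),
    input=input_pb2.Input())
log = prediction_log_pb2.PredictionLog(
    classify_log=prediction_log_pb2.ClassifyLog(request=class_request))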
Example #7
def main():
    ### Generate TFRecords for warming up. ###
    global filename
    for root, dirs, files in os.walk(img_path):
        for img_file in files:
            if img_file.endswith((".png", ".bmp", ".jpg")):
                filename = os.path.join(root, img_file)
                break
        else:
            continue
        break  # stop after the first matching image
    print(filename)
    with tf.io.TFRecordWriter(f"{target_path}tf_serving_warmup_requests") as writer:
        # replace <request> with one of:
        # predict_pb2.PredictRequest(..)
        # classification_pb2.ClassificationRequest(..)
        # regression_pb2.RegressionRequest(..)
        # inference_pb2.MultiInferenceRequest(..)
        
        ### Method 1 ###
        if model_origin == "saiap":
            request = predict_pb2.PredictRequest()
            request.model_spec.name = MODEL_NAME
            request.model_spec.signature_name = SIG
            request.inputs[INPUTS].CopyFrom(
                tensor_util.make_tensor_proto([saiap_open_image(filename)], tf.string)
            )
        elif model_origin == "wzsda":
            ### Method 2 ###
            request = predict_pb2.PredictRequest(
                model_spec=model_pb2.ModelSpec(name=MODEL_NAME, signature_name=SIG),
                inputs={
                    INPUTS: tensor_util.make_tensor_proto([open_image(filename)], tf.string),
                    "degree": tensor_util.make_tensor_proto(["0"], tf.string)
                }
            )
        else:
            raise ValueError(f"arg 'model_origin' {model_origin} is not supported!")
        log = prediction_log_pb2.PredictionLog(
            predict_log=prediction_log_pb2.PredictLog(request=request))
        writer.write(log.SerializeToString())
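open_image() and saiap_open_image() are not shown in this example; a hypothetical stand-in that returns the raw encoded bytes the tf.string tensor expects:

def open_image(path):
    with open(path, "rb") as f:
        return f.read()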
Example #8
def export(model_dir, vocab_dir, max_length, theta, emb_size, dim,
           num_oov_buckets):
    while True:
        cur = os.path.join(model_dir, str(int(time.time())))
        if not tf.gfile.Exists(cur):
            break
    print("export model path: %s" % cur)
    method_name = tf.saved_model.signature_constants.PREDICT_METHOD_NAME

    rowkeys_file = os.path.join(vocab_dir, "rowkeys.vocab")
    embedding_file = os.path.join(vocab_dir, "emb.vocab")

    with tf.Graph().as_default(), tf.Session() as sess:
        rowkeys = tf.placeholder(dtype=tf.string, name="rowkeys")
        algorithm_ids = tf.placeholder(dtype=tf.uint32, name="algorithm_id")
        scores = tf.placeholder(dtype=tf.float32, name="scores")

        # Example inputs for local testing:
        # rowkeys = tf.constant(["2785c0f6f0e592ah", "8605d35a21f857bk",
        #                        "7915d39c6f9755ap", "3155d3846cb468bk",
        #                        "4285d39597b375bk"], dtype=tf.string)
        # algorithm_ids = tf.constant([2081, 2803, 2086, 2803, 2086],
        #                             dtype=tf.uint32)
        # scores = tf.constant([0.1, 0.2, 0.3, 0.11, 0.7], dtype=tf.float32)

        with open(rowkeys_file, encoding="utf8") as fi:
            lines = fi.readlines()
            vocab_size = len(lines)
            print(vocab_size)

        emb = tf.Variable(np.loadtxt(embedding_file, delimiter=' '),
                          dtype=tf.float32)

        print(emb.shape)

        table = tf.contrib.lookup.index_table_from_file(
            vocabulary_file=rowkeys_file,
            vocab_size=vocab_size - num_oov_buckets,
            hasher_spec=tf.contrib.lookup.FastHashSpec,
            num_oov_buckets=num_oov_buckets)
        rowkey_ids = table.lookup(rowkeys)
        rowkeys_embedding = tf.nn.embedding_lookup(emb, rowkey_ids)
        rowkeys_embedding /= tf.linalg.norm(rowkeys_embedding,
                                            axis=1,
                                            keepdims=True)
        rowkeys_embedding = tf.where(tf.is_nan(rowkeys_embedding),
                                     tf.zeros_like(rowkeys_embedding),
                                     rowkeys_embedding)
        similarities = tf.cast(
            tf.matmul(rowkeys_embedding, rowkeys_embedding, transpose_b=True),
            tf.float32)
        kernel_matrix = tf.reshape(
            scores, [-1, 1]) * similarities * tf.reshape(scores, [1, -1])
        # alpha = theta / (2 * (1 - theta))
        # kernel_matrix = tf.math.exp(alpha * tf.reshape(scores, [-1, 1])) * similarities * tf.math.exp(alpha * tf.reshape(scores, [1, -1]))

        indices = dpp(kernel_matrix, max_length)

        predict_rowkeys = tf.gather(rowkeys, indices)
        predict_scores = tf.gather(scores, indices)
        predict_algorithm_ids = tf.gather(algorithm_ids, indices)
        predict_positions = indices

        sess.run(tf.global_variables_initializer())
        sess.run(tf.tables_initializer())
        #sess.run(emb, feed_dict={embedding_placeholder: emb_dict})
        # print(sess.run(predict_rowkeys))
        # print(sess.run(predict_scores))
        signature_def_map = signature({
            "prediction": {
                "inputs": {
                    'rowkeys': rowkeys,
                    "scores": scores,
                    "algorithm_ids": algorithm_ids
                },
                "outputs": {
                    "rowkeys": predict_rowkeys,
                    "scores": predict_scores,
                    "algorithm_ids": predict_algorithm_ids,
                    "origin_position": predict_positions
                },
                "method_name": method_name
            },
        })

        builder = tf.saved_model.builder.SavedModelBuilder(cur)
        builder.add_meta_graph_and_variables(
            sess,
            tags=[tf.saved_model.tag_constants.SERVING],
            signature_def_map=signature_def_map,
            assets_collection=ops.get_collection(
                ops.GraphKeys.ASSET_FILEPATHS),
            main_op=tf.tables_initializer())
        builder.save()

        os.mkdir(os.path.join(cur, "assets.extra"))
        with tf.python_io.TFRecordWriter(
                os.path.join(
                    cur, "assets.extra/tf_serving_warmup_requests")) as writer:
            request = predict_pb2.PredictRequest(
                model_spec=model_pb2.ModelSpec(name="kva_dpp_model",
                                               signature_name="prediction"),
                inputs={
                    "rowkeys":
                    tf.make_tensor_proto(
                        ["2785c0f6f0e592ah", "2785c0f6f0e592ah"],
                        dtype=tf.string),
                    "algorithm_ids":
                    tf.make_tensor_proto([2081, 2081], dtype=tf.uint32),
                    "scores":
                    tf.make_tensor_proto([0.7, 0.7], dtype=tf.float32)
                })
            print(request)
            log = prediction_log_pb2.PredictionLog(
                predict_log=prediction_log_pb2.PredictLog(request=request))
            writer.write(log.SerializeToString())
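The signature() helper used above is not shown; a plausible sketch built on the TF 1.x SavedModel utilities:

def signature(defs):
    signature_def_map = {}
    for name, d in defs.items():
        signature_def_map[name] = tf.saved_model.signature_def_utils.build_signature_def(
            inputs={k: tf.saved_model.utils.build_tensor_info(v)
                    for k, v in d["inputs"].items()},
            outputs={k: tf.saved_model.utils.build_tensor_info(v)
                     for k, v in d["outputs"].items()},
            method_name=d["method_name"])
    return signature_def_map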
Example #9
def get_predict_request(x):
    model_spec = model_pb2.ModelSpec(name='default', signature_name='export_outputs')
    request = predict_pb2.PredictRequest(model_spec=model_spec)
    request.inputs['x'].CopyFrom(
        tf.contrib.util.make_tensor_proto(x, shape=x.shape))
    return request
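Illustrative usage of get_predict_request() with a PredictionService stub (shape and stub are hypothetical):

import numpy as np

x = np.zeros((1, 784), dtype=np.float32)
request = get_predict_request(x)
# response = stub.Predict(request, 10.0)  # 10-second timeout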
Example #10
import grpc
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import model_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc
from tensorflow_serving.apis import get_model_metadata_pb2


CHANNEL_ADDRESS = r'172.16.104.25:19001'
MODEL_NAME = r'3d_nodule_detector'

channel = grpc.insecure_channel(CHANNEL_ADDRESS)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

request = get_model_metadata_pb2.GetModelMetadataRequest(
    model_spec=model_pb2.ModelSpec(name=MODEL_NAME),
    metadata_field=["signature_def"])

response = stub.GetModelMetadata(request)
sigdef_str = response.metadata["signature_def"].value

print ("Name:", response.model_spec.name)
print ("Version:", response.model_spec.version.value)
print (get_model_metadata_pb2.SignatureDefMap.FromString(sigdef_str))
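An optional follow-up (illustrative): parse the returned SignatureDefMap and list each signature's input tensors.

sigdef_map = get_model_metadata_pb2.SignatureDefMap.FromString(sigdef_str)
for sig_name, sig_def in sigdef_map.signature_def.items():
    print(sig_name, list(sig_def.inputs.keys()))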