def test_build_standardized_signature_def_classification(self):
  """Tests classification with one output tensor."""
  inputs = {
      "input-1": array_ops.placeholder(
          dtypes.string, 1, name="input-tensor-1")
  }
  outputs = {
      "output-1": array_ops.placeholder(
          dtypes.string, 1, name="output-tensor-1")
  }
  signature_def = saved_model_export_utils.build_standardized_signature_def(
      inputs, outputs, constants.ProblemType.CLASSIFICATION)

  # Build the expected proto field by field and compare wholesale.
  string_dtype = types_pb2.DataType.Value("DT_STRING")
  vector_shape = tensor_shape_pb2.TensorShapeProto(
      dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
  expected = meta_graph_pb2.SignatureDef(
      method_name=signature_constants.CLASSIFY_METHOD_NAME)
  expected.inputs[signature_constants.CLASSIFY_INPUTS].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="input-tensor-1:0", dtype=string_dtype,
          tensor_shape=vector_shape))
  expected.outputs[signature_constants.CLASSIFY_OUTPUT_CLASSES].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-1:0", dtype=string_dtype,
          tensor_shape=vector_shape))
  self.assertEqual(signature_def, expected)
def test_build_standardized_signature_def_classify_classes_only(self):
  """Tests a ClassificationOutput that carries only a classes tensor."""
  with context.graph_mode():
    inputs = {
        'input-1': array_ops.placeholder(
            dtypes.string, 1, name='input-tensor-1')
    }
    classes = array_ops.placeholder(dtypes.string, 1, name='output-tensor-1')

    signature_def = export_output_lib.ClassificationOutput(
        classes=classes).as_signature_def(inputs)

    string_dtype = types_pb2.DataType.Value('DT_STRING')
    vector_shape = tensor_shape_pb2.TensorShapeProto(
        dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
    expected = meta_graph_pb2.SignatureDef(
        method_name=signature_constants.CLASSIFY_METHOD_NAME)
    expected.inputs[signature_constants.CLASSIFY_INPUTS].CopyFrom(
        meta_graph_pb2.TensorInfo(
            name='input-tensor-1:0', dtype=string_dtype,
            tensor_shape=vector_shape))
    expected.outputs[signature_constants.CLASSIFY_OUTPUT_CLASSES].CopyFrom(
        meta_graph_pb2.TensorInfo(
            name='output-tensor-1:0', dtype=string_dtype,
            tensor_shape=vector_shape))
    self.assertEqual(signature_def, expected)
def testConvertDefaultSignatureRegressionToSignatureDef(self):
  """Checks conversion of a default regression signature to a SignatureDef."""
  regression_signature = manifest_pb2.RegressionSignature(
      input=manifest_pb2.TensorBinding(
          tensor_name=signature_constants.REGRESS_INPUTS),
      output=manifest_pb2.TensorBinding(
          tensor_name=signature_constants.REGRESS_OUTPUTS))
  signatures_proto = manifest_pb2.Signatures()
  signatures_proto.default_signature.regression_signature.CopyFrom(
      regression_signature)

  signature_def = bundle_shim._convert_default_signature_to_signature_def(
      signatures_proto)

  # Validate regression signature correctly copied over.
  self.assertEqual(signature_constants.REGRESS_METHOD_NAME,
                   signature_def.method_name)
  self.assertEqual(1, len(signature_def.inputs))
  self.assertEqual(1, len(signature_def.outputs))
  self.assertProtoEquals(
      signature_def.inputs[signature_constants.REGRESS_INPUTS],
      meta_graph_pb2.TensorInfo(name=signature_constants.REGRESS_INPUTS))
  self.assertProtoEquals(
      signature_def.outputs[signature_constants.REGRESS_OUTPUTS],
      meta_graph_pb2.TensorInfo(name=signature_constants.REGRESS_OUTPUTS))
def testConvertNamedSignatureToSignatureDef(self):
  """Checks conversion of generic named signatures to a predict SignatureDef."""
  signatures_proto = manifest_pb2.Signatures()

  input_generic = manifest_pb2.GenericSignature()
  input_generic.map["input_key"].CopyFrom(
      manifest_pb2.TensorBinding(tensor_name="input"))
  signatures_proto.named_signatures[
      signature_constants.PREDICT_INPUTS].generic_signature.CopyFrom(
          input_generic)

  output_generic = manifest_pb2.GenericSignature()
  output_generic.map["output_key"].CopyFrom(
      manifest_pb2.TensorBinding(tensor_name="output"))
  signatures_proto.named_signatures[
      signature_constants.PREDICT_OUTPUTS].generic_signature.CopyFrom(
          output_generic)

  signature_def = bundle_shim._convert_named_signatures_to_signature_def(
      signatures_proto)

  self.assertEqual(signature_constants.PREDICT_METHOD_NAME,
                   signature_def.method_name)
  self.assertEqual(1, len(signature_def.inputs))
  self.assertEqual(1, len(signature_def.outputs))
  self.assertProtoEquals(signature_def.inputs["input_key"],
                         meta_graph_pb2.TensorInfo(name="input"))
  self.assertProtoEquals(signature_def.outputs["output_key"],
                         meta_graph_pb2.TensorInfo(name="output"))
def test_build_standardized_signature_def(self):
  """Tests the regression SignatureDef built for LINEAR_REGRESSION."""
  inputs = {
      "input-1": array_ops.placeholder(
          dtypes.float32, 1, name="input-tensor-1")
  }
  outputs = {
      "output-1": array_ops.placeholder(
          dtypes.float32, 1, name="output-tensor-1")
  }
  signature_def = saved_model_export_utils.build_standardized_signature_def(
      inputs, outputs, constants.ProblemType.LINEAR_REGRESSION)

  float_dtype = types_pb2.DataType.Value("DT_FLOAT")
  vector_shape = tensor_shape_pb2.TensorShapeProto(
      dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
  expected = meta_graph_pb2.SignatureDef(
      method_name=signature_constants.REGRESS_METHOD_NAME)
  expected.inputs[signature_constants.REGRESS_INPUTS].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="input-tensor-1:0", dtype=float_dtype,
          tensor_shape=vector_shape))
  expected.outputs[signature_constants.REGRESS_OUTPUTS].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-1:0", dtype=float_dtype,
          tensor_shape=vector_shape))
  self.assertEqual(signature_def, expected)
def test_build_standardized_signature_def_classify_scores_only(self):
  """Tests classification without classes tensor."""
  inputs = {
      "input-1": array_ops.placeholder(
          dtypes.string, 1, name="input-tensor-1")
  }
  scores = array_ops.placeholder(
      dtypes.float32, 1, name="output-tensor-scores")

  signature_def = export_output_lib.ClassificationOutput(
      scores=scores).as_signature_def(inputs)

  float_dtype = types_pb2.DataType.Value("DT_FLOAT")
  string_dtype = types_pb2.DataType.Value("DT_STRING")
  vector_shape = tensor_shape_pb2.TensorShapeProto(
      dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
  expected = meta_graph_pb2.SignatureDef(
      method_name=signature_constants.CLASSIFY_METHOD_NAME)
  expected.inputs[signature_constants.CLASSIFY_INPUTS].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="input-tensor-1:0", dtype=string_dtype,
          tensor_shape=vector_shape))
  expected.outputs[signature_constants.CLASSIFY_OUTPUT_SCORES].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-scores:0", dtype=float_dtype,
          tensor_shape=vector_shape))
  self.assertEqual(signature_def, expected)
def test_build_standardized_signature_def_regression(self):
  """Tests that RegressionOutput produces a regress SignatureDef."""
  inputs = {
      "input-1": array_ops.placeholder(
          dtypes.string, 1, name="input-tensor-1")
  }
  value = array_ops.placeholder(dtypes.float32, 1, name="output-tensor-1")

  signature_def = export_output_lib.RegressionOutput(
      value).as_signature_def(inputs)

  float_dtype = types_pb2.DataType.Value("DT_FLOAT")
  string_dtype = types_pb2.DataType.Value("DT_STRING")
  vector_shape = tensor_shape_pb2.TensorShapeProto(
      dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
  expected = meta_graph_pb2.SignatureDef(
      method_name=signature_constants.REGRESS_METHOD_NAME)
  expected.inputs[signature_constants.REGRESS_INPUTS].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="input-tensor-1:0", dtype=string_dtype,
          tensor_shape=vector_shape))
  expected.outputs[signature_constants.REGRESS_OUTPUTS].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-1:0", dtype=float_dtype,
          tensor_shape=vector_shape))
  self.assertEqual(signature_def, expected)
def export_model(last_checkpoint):
  """Restores `last_checkpoint` and writes a serving SavedModel to MODEL_DIR.

  Args:
    last_checkpoint: Path to the checkpoint to restore before exporting.
  """
  # Create a session with a new graph.
  with tf.Session(graph=tf.Graph()) as sess:
    x = tf.placeholder(tf.float32, [None, 784])
    p = mnist_model.get_model(x, training=False)

    # Define key elements: a pass-through key so callers can correlate
    # batched predictions with their requests.
    input_key = tf.placeholder(tf.int64, [None,])
    output_key = tf.identity(input_key)

    # Define API inputs/outputs object
    inputs = {'key': input_key, 'image': x}
    outputs = {'key': output_key, 'scores': p}

    def _tensor_info(tensor):
      # TensorInfo carrying the tensor's graph name and dtype enum.
      info = meta_graph_pb2.TensorInfo()
      info.name = tensor.name
      info.dtype = tensor.dtype.as_datatype_enum
      return info

    # .items() replaces the Python-2-only .iteritems(); the duplicated
    # per-tensor loops collapse into dict comprehensions.
    input_signatures = {key: _tensor_info(val) for key, val in inputs.items()}
    output_signatures = {
        key: _tensor_info(val) for key, val in outputs.items()
    }

    # Also record plain alias->tensor-name maps for collection readers.
    tf.add_to_collection(
        'inputs', json.dumps({key: val.name for key, val in inputs.items()}))
    tf.add_to_collection(
        'outputs', json.dumps({key: val.name for key, val in outputs.items()}))

    init_op = tf.global_variables_initializer()
    sess.run(init_op)

    # Restore the latest checkpoint and save the model
    saver = tf.train.Saver()
    saver.restore(sess, last_checkpoint)
    predict_signature_def = signature_def_utils.build_signature_def(
        input_signatures, output_signatures,
        signature_constants.PREDICT_METHOD_NAME)
    build = builder.SavedModelBuilder(MODEL_DIR)
    build.add_meta_graph_and_variables(
        sess, [tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                predict_signature_def
        },
        assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS))
    build.save()
def export_model(checkpoint, model_dir):
  """Restores `checkpoint` and writes a serving SavedModel to `model_dir`.

  Args:
    checkpoint: Path to the checkpoint to restore before exporting.
    model_dir: Directory the SavedModel is written to.
  """
  with tf.Session(graph=tf.Graph()) as sess:
    # Define API inputs/outputs object
    image_bytes = tf.placeholder(tf.string)
    weights, biases = get_weights_biases()
    prediction = build_prediction_graph(image_bytes, weights, biases)

    inputs = {'image_bytes': image_bytes}
    outputs = {'prediction': prediction}

    def _tensor_info(tensor):
      # TensorInfo carrying the tensor's graph name and dtype enum.
      info = meta_graph_pb2.TensorInfo()
      info.name = tensor.name
      info.dtype = tensor.dtype.as_datatype_enum
      return info

    # .items() replaces the Python-2-only .iteritems(); the duplicated
    # per-tensor loops collapse into dict comprehensions.
    input_signatures = {key: _tensor_info(val) for key, val in inputs.items()}
    output_signatures = {
        key: _tensor_info(val) for key, val in outputs.items()
    }

    # Also record plain alias->tensor-name maps for collection readers.
    tf.add_to_collection(
        'inputs', json.dumps({key: val.name for key, val in inputs.items()}))
    tf.add_to_collection(
        'outputs', json.dumps({key: val.name for key, val in outputs.items()}))

    init_op = tf.global_variables_initializer()
    sess.run(init_op)

    # Restore the latest checkpoint and save the model
    saver = tf.train.Saver()
    saver.restore(sess, checkpoint)
    predict_signature_def = signature_def_utils.build_signature_def(
        input_signatures, output_signatures,
        signature_constants.PREDICT_METHOD_NAME)
    build = builder.SavedModelBuilder(model_dir)
    build.add_meta_graph_and_variables(
        sess, [tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                predict_signature_def
        },
        assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS))
    # Finally save the model
    build.save()
def testConvertSignaturesToSignatureDefs(self):
  """Converts session-bundle Signatures (default + named) to SignatureDefs."""
  base_path = tf.test.test_src_dir_path(SESSION_BUNDLE_PATH)
  meta_graph_filename = os.path.join(base_path,
                                     constants.META_GRAPH_DEF_FILENAME)
  metagraph_def = meta_graph.read_meta_graph_file(meta_graph_filename)
  default_signature_def, named_signature_def = (
      bundle_shim._convert_signatures_to_signature_defs(metagraph_def))

  # The default signature should become a regression SignatureDef.
  self.assertEqual(default_signature_def.method_name,
                   signature_constants.REGRESS_METHOD_NAME)
  self.assertEqual(len(default_signature_def.inputs), 1)
  self.assertEqual(len(default_signature_def.outputs), 1)
  self.assertProtoEquals(
      default_signature_def.inputs[signature_constants.REGRESS_INPUTS],
      meta_graph_pb2.TensorInfo(name="tf_example:0"))
  self.assertProtoEquals(
      default_signature_def.outputs[signature_constants.REGRESS_OUTPUTS],
      meta_graph_pb2.TensorInfo(name="Identity:0"))

  # The named signatures should become a predict SignatureDef.
  self.assertEqual(named_signature_def.method_name,
                   signature_constants.PREDICT_METHOD_NAME)
  self.assertEqual(len(named_signature_def.inputs), 1)
  self.assertEqual(len(named_signature_def.outputs), 1)
  self.assertProtoEquals(named_signature_def.inputs["x"],
                         meta_graph_pb2.TensorInfo(name="x:0"))
  self.assertProtoEquals(named_signature_def.outputs["y"],
                         meta_graph_pb2.TensorInfo(name="y:0"))

  # Now try default signature only
  collection_def = metagraph_def.collection_def
  signatures_proto = manifest_pb2.Signatures()
  signatures = collection_def[constants.SIGNATURES_KEY].any_list.value[0]
  signatures.Unpack(signatures_proto)
  # Keep two copies of the unpacked proto: one stripped to the default
  # signature only, one stripped to the named signatures only.
  named_only_signatures_proto = manifest_pb2.Signatures()
  named_only_signatures_proto.CopyFrom(signatures_proto)
  default_only_signatures_proto = manifest_pb2.Signatures()
  default_only_signatures_proto.CopyFrom(signatures_proto)
  default_only_signatures_proto.named_signatures.clear()
  default_only_signatures_proto.ClearField("named_signatures")
  # Re-pack the stripped proto into the metagraph's collection in place.
  metagraph_def.collection_def[constants.SIGNATURES_KEY].any_list.value[
      0].Pack(default_only_signatures_proto)
  default_signature_def, named_signature_def = (
      bundle_shim._convert_signatures_to_signature_defs(metagraph_def))
  self.assertEqual(default_signature_def.method_name,
                   signature_constants.REGRESS_METHOD_NAME)
  self.assertEqual(named_signature_def, None)

  # Now named signatures only: the default conversion should return None.
  named_only_signatures_proto.ClearField("default_signature")
  metagraph_def.collection_def[constants.SIGNATURES_KEY].any_list.value[
      0].Pack(named_only_signatures_proto)
  default_signature_def, named_signature_def = (
      bundle_shim._convert_signatures_to_signature_defs(metagraph_def))
  self.assertEqual(named_signature_def.method_name,
                   signature_constants.PREDICT_METHOD_NAME)
  self.assertEqual(default_signature_def, None)
def build_prediction_graph(self):
  """Builds prediction graph and registers appropriate endpoints.

  Returns:
    A pair (input_signatures, output_signatures) of dicts mapping signature
    aliases to meta_graph_pb2.TensorInfo protos.
  """
  examples = tf.placeholder(tf.string, shape=(None, ))
  features = {
      'image': tf.FixedLenFeature(shape=[IMAGE_PIXELS], dtype=tf.float32),
      'key': tf.FixedLenFeature(shape=[], dtype=tf.string),
  }
  parsed = tf.parse_example(examples, features)
  images = parsed['image']
  keys = parsed['key']

  # Build a Graph that computes predictions from the inference model.
  logits = inference(images, self.hidden1, self.hidden2)
  softmax = tf.nn.softmax(logits)
  prediction = tf.argmax(softmax, 1)

  # Mark the inputs and the outputs.
  # The input alias carries a _bytes suffix to indicate that this tensor
  # value is raw bytes and will be base64 encoded over HTTP. Likewise any
  # output alias with a _bytes suffix is base64 encoded in the HTTP
  # response; to get the binary value it should be base64 decoded.
  input_signatures = {}
  predict_input_tensor = meta_graph_pb2.TensorInfo()
  predict_input_tensor.name = examples.name
  predict_input_tensor.dtype = examples.dtype.as_datatype_enum
  input_signatures['example_bytes'] = predict_input_tensor

  # NOTE(review): the collection alias is 'examples_bytes' while the
  # signature alias above is 'example_bytes' — confirm the mismatch is
  # intentional before relying on either name.
  tf.add_to_collection('inputs',
                       json.dumps({'examples_bytes': examples.name}))
  tf.add_to_collection(
      'outputs',
      json.dumps({
          'key': keys.name,
          'prediction': prediction.name,
          'scores': softmax.name
      }))

  # Map each output alias directly to its tensor. This removes the
  # quadratic name-matching inner loop and the Python-2-only .iteritems()
  # of the original, while producing identical TensorInfo protos.
  output_tensors = {'key': keys, 'prediction': prediction, 'scores': softmax}
  output_signatures = {}
  for alias, tensor in output_tensors.items():
    predict_output_tensor = meta_graph_pb2.TensorInfo()
    predict_output_tensor.name = tensor.name
    predict_output_tensor.dtype = tensor.dtype.as_datatype_enum
    output_signatures[alias] = predict_output_tensor
  return input_signatures, output_signatures
def _generate_saved_model_for_half_plus_two(export_dir, as_text=False):
  """Generates SavedModel for half plus two.

  Args:
    export_dir: The directory to which the SavedModel should be written.
    as_text: Writes the SavedModel protocol buffer in text format to disk.
  """
  model_builder = saved_model_builder.SavedModelBuilder(export_dir)
  with tf.Session(graph=tf.Graph()) as sess:
    # Model parameters are variables (not constants) so that variable
    # loading is exercised upon restore.
    half = tf.Variable(0.5, name="a")
    two = tf.Variable(2.0, name="b")

    # Placeholder fed with serialized tensorflow.Example messages.
    serialized_tf_example = tf.placeholder(tf.string, name="tf_example")
    # Parse out a single float feature named "x".
    feature_configs = {"x": tf.FixedLenFeature([1], dtype=tf.float32),}
    tf_example = tf.parse_example(serialized_tf_example, feature_configs)
    x = tf.identity(tf_example["x"], name="x")  # tf.identity assigns a name.
    y = tf.add(tf.mul(half, x), two, name="y")

    # Regression signature: tensor specs for the input and the output.
    input_tensor = meta_graph_pb2.TensorInfo()
    input_tensor.name = serialized_tf_example.name
    output_tensor = meta_graph_pb2.TensorInfo()
    output_tensor.name = tf.identity(y).name
    signature_def = utils.build_signature_def(
        {signature_constants.REGRESS_INPUTS: input_tensor},
        {signature_constants.REGRESS_OUTPUTS: output_tensor},
        signature_constants.REGRESS_METHOD_NAME)

    # Initialize all variables and then save the SavedModel.
    sess.run(tf.initialize_all_variables())
    model_builder.add_meta_graph_and_variables(
        sess, [constants.TAG_SERVING],
        signature_def_map={
            signature_constants.REGRESS_METHOD_NAME: signature_def
        })
    model_builder.save(as_text)
def test_build_standardized_signature_def_classification2(self):
  """Tests multiple output tensors that include classes and probabilities."""
  inputs = {
      "input-1": array_ops.placeholder(
          dtypes.string, 1, name="input-tensor-1")
  }
  outputs = {
      "classes": array_ops.placeholder(
          dtypes.string, 1, name="output-tensor-classes"),
      # Will be used for CLASSIFY_OUTPUT_SCORES.
      "probabilities": array_ops.placeholder(
          dtypes.float32, 1, name="output-tensor-proba"),
      "logits": array_ops.placeholder(
          dtypes.float32, 1, name="output-tensor-logits-unused"),
  }
  signature_def = saved_model_export_utils.build_standardized_signature_def(
      inputs, outputs, constants.ProblemType.CLASSIFICATION)

  float_dtype = types_pb2.DataType.Value("DT_FLOAT")
  string_dtype = types_pb2.DataType.Value("DT_STRING")
  vector_shape = tensor_shape_pb2.TensorShapeProto(
      dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
  expected = meta_graph_pb2.SignatureDef(
      method_name=signature_constants.CLASSIFY_METHOD_NAME)
  expected.inputs[signature_constants.CLASSIFY_INPUTS].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="input-tensor-1:0", dtype=string_dtype,
          tensor_shape=vector_shape))
  expected.outputs[signature_constants.CLASSIFY_OUTPUT_CLASSES].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-classes:0", dtype=string_dtype,
          tensor_shape=vector_shape))
  expected.outputs[signature_constants.CLASSIFY_OUTPUT_SCORES].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-proba:0", dtype=float_dtype,
          tensor_shape=vector_shape))
  self.assertEqual(signature_def, expected)
def build_tensor_info_from_op(op):
  """Utility function to build TensorInfo proto from an Op.

  Note that this function should be used with caution. It is strictly
  restricted to TensorFlow internal use-cases only. Please make sure you
  do need it before using it.

  This utility function overloads the TensorInfo proto by setting the name
  to the Op's name, dtype to DT_INVALID and tensor_shape as None. One
  typical usage is for the Op of the call site for the defunned function:
  ```python
  @function.defun
  def some_variable_initialization_fn(value_a, value_b):
    a = value_a
    b = value_b

  value_a = constant_op.constant(1, name="a")
  value_b = constant_op.constant(2, name="b")
  op_info = utils.build_tensor_info_from_op(
      some_variable_initialization_fn(value_a, value_b))
  ```

  Args:
    op: An Op whose name is used to build the TensorInfo. The name that
        points to the Op could be fetched at run time in the Loader session.

  Returns:
    A TensorInfo protocol buffer constructed based on the supplied argument.
  """
  # Only the op name is meaningful here; dtype is deliberately DT_INVALID
  # and the shape unknown, since an Op (unlike a Tensor) has neither.
  return meta_graph_pb2.TensorInfo(
      dtype=types_pb2.DT_INVALID,
      tensor_shape=tensor_shape.unknown_shape().as_proto(),
      name=op.name)
def get_tensor_info(tensor_name):
  """Builds a TensorInfo proto for the named tensor in the enclosing graph."""
  tensor = graph.get_tensor_by_name(tensor_name)
  dtype_enum = tensor.dtype.as_datatype_enum
  shape_proto = tensor.get_shape().as_proto()
  return meta_graph_pb2.TensorInfo(
      name=tensor_name, dtype=dtype_enum, tensor_shape=shape_proto)
def build_tensor_info(tensor):
  """Utility function to build TensorInfo proto from a Tensor.

  Args:
    tensor: Tensor or SparseTensor whose name, dtype and shape are used to
        build the TensorInfo. For SparseTensors, the names of the three
        constituent Tensors are used.

  Returns:
    A TensorInfo protocol buffer constructed based on the supplied argument.

  Raises:
    RuntimeError: If eager execution is enabled.
  """
  if context.executing_eagerly():
    raise RuntimeError("build_tensor_info is not supported in Eager mode.")
  info = meta_graph_pb2.TensorInfo(
      dtype=dtypes.as_dtype(tensor.dtype).as_datatype_enum,
      tensor_shape=tensor.get_shape().as_proto())
  if not isinstance(tensor, sparse_tensor.SparseTensor):
    # Dense tensors are identified by a single tensor name.
    info.name = tensor.name
    return info
  # SparseTensors are encoded via the names of their three component tensors.
  coo = info.coo_sparse
  coo.values_tensor_name = tensor.values.name
  coo.indices_tensor_name = tensor.indices.name
  coo.dense_shape_tensor_name = tensor.dense_shape.name
  return info
def testPrintREFTypeTensor(self):
  """_print_tensor_info must render REF dtypes and write nothing to stderr."""
  ref_tensor_info = meta_graph_pb2.TensorInfo()
  ref_tensor_info.dtype = types_pb2.DT_FLOAT_REF
  with captured_output() as (out, err):
    saved_model_cli._print_tensor_info(ref_tensor_info)
  # assertIn gives a diagnosable failure message (shows both operands),
  # unlike the original assertTrue('...' in ...).
  self.assertIn('DT_FLOAT_REF', out.getvalue().strip())
  self.assertEqual(err.getvalue().strip(), '')
def test_build_standardized_signature_def_classification5(self):
  """Tests multiple output tensors that include integer classes and scores.

  Integer classes are dropped out, because Servo classification can only
  serve string classes. So, only scores are present in the signature.
  """
  inputs = {
      "input-1": array_ops.placeholder(
          dtypes.string, 1, name="input-tensor-1")
  }
  outputs = {
      "classes": array_ops.placeholder(
          dtypes.int64, 1, name="output-tensor-classes"),
      "scores": array_ops.placeholder(
          dtypes.float32, 1, name="output-tensor-scores"),
      "logits": array_ops.placeholder(
          dtypes.float32, 1, name="output-tensor-logits-unused"),
  }
  signature_def = saved_model_export_utils.build_standardized_signature_def(
      inputs, outputs, constants.ProblemType.CLASSIFICATION)

  float_dtype = types_pb2.DataType.Value("DT_FLOAT")
  string_dtype = types_pb2.DataType.Value("DT_STRING")
  vector_shape = tensor_shape_pb2.TensorShapeProto(
      dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
  expected = meta_graph_pb2.SignatureDef(
      method_name=signature_constants.CLASSIFY_METHOD_NAME)
  expected.inputs[signature_constants.CLASSIFY_INPUTS].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="input-tensor-1:0", dtype=string_dtype,
          tensor_shape=vector_shape))
  expected.outputs[signature_constants.CLASSIFY_OUTPUT_SCORES].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-scores:0", dtype=float_dtype,
          tensor_shape=vector_shape))
  self.assertEqual(signature_def, expected)
def testSpecifiedSignature(self):
  """Test prediction with specified signature definition."""
  np.random.seed(4444)
  for key, op in KEYS_AND_OPS:
    x = np.random.rand()
    y = np.random.rand()
    expected_output = op(x, y)
    # Hand-built TensorInfos pointing at the tensors saved under
    # 'inputs/...' and 'outputs/...' in the export; empty shape proto
    # denotes a scalar.
    inputs = {
        'x': meta_graph_pb2.TensorInfo(
            name='inputs/x:0',
            dtype=types_pb2.DT_FLOAT,
            tensor_shape=tensor_shape_pb2.TensorShapeProto()),
        'y': meta_graph_pb2.TensorInfo(
            name='inputs/y:0',
            dtype=types_pb2.DT_FLOAT,
            tensor_shape=tensor_shape_pb2.TensorShapeProto())
    }
    outputs = {
        key: meta_graph_pb2.TensorInfo(
            name='outputs/{}:0'.format(key),
            dtype=types_pb2.DT_FLOAT,
            tensor_shape=tensor_shape_pb2.TensorShapeProto())
    }
    signature_def = signature_def_utils.build_signature_def(
        inputs=inputs,
        outputs=outputs,
        method_name='tensorflow/serving/regress')
    predictor = saved_model_predictor.SavedModelPredictor(
        export_dir=self._export_dir,
        signature_def=signature_def)
    # The predictor's fetch tensor should correspond to the requested key.
    output_tensor_name = predictor.fetch_tensors[key].name
    self.assertRegexpMatches(
        output_tensor_name,
        key,
        msg='Unexpected fetch tensor.')
    output = predictor({'x': x, 'y': y})[key]
    self.assertAlmostEqual(
        expected_output, output, places=3,
        msg='Failed for signature "{}". '
        'Got output {} for x = {} and y = {}'.format(key, output, x, y))
def testAddOutputToSignatureDef(self):
  """_add_output_to_signature_def adds and overwrites outputs by map key."""
  signature_def = meta_graph_pb2.SignatureDef()
  # Untouched proto kept for the final no-side-effects comparison.
  signature_def_compare = meta_graph_pb2.SignatureDef()

  # Add output to signature-def corresponding to `foo_key`.
  bundle_shim._add_output_to_signature_def("foo-name", "foo-key",
                                           signature_def)
  self.assertEqual(len(signature_def.outputs), 1)
  self.assertEqual(len(signature_def.inputs), 0)
  self.assertProtoEquals(
      signature_def.outputs["foo-key"],
      meta_graph_pb2.TensorInfo(name="foo-name"))

  # Attempt to add another output to the signature-def with the same tensor
  # name and key: must be idempotent.
  bundle_shim._add_output_to_signature_def("foo-name", "foo-key",
                                           signature_def)
  self.assertEqual(len(signature_def.outputs), 1)
  self.assertEqual(len(signature_def.inputs), 0)
  self.assertProtoEquals(
      signature_def.outputs["foo-key"],
      meta_graph_pb2.TensorInfo(name="foo-name"))

  # Add another output to the signature-def corresponding to `bar-key`.
  bundle_shim._add_output_to_signature_def("bar-name", "bar-key",
                                           signature_def)
  self.assertEqual(len(signature_def.outputs), 2)
  self.assertEqual(len(signature_def.inputs), 0)
  self.assertProtoEquals(
      signature_def.outputs["bar-key"],
      meta_graph_pb2.TensorInfo(name="bar-name"))

  # Add an output to the signature-def corresponding to `foo-key` with an
  # updated tensor name: the existing entry must be overwritten.
  bundle_shim._add_output_to_signature_def("bar-name", "foo-key",
                                           signature_def)
  self.assertEqual(len(signature_def.outputs), 2)
  self.assertEqual(len(signature_def.inputs), 0)
  self.assertProtoEquals(
      signature_def.outputs["foo-key"],
      meta_graph_pb2.TensorInfo(name="bar-name"))

  # Test that there are no other side effects: after removing the entries
  # we added, the proto should equal an untouched SignatureDef.
  del signature_def.outputs["foo-key"]
  del signature_def.outputs["bar-key"]
  self.assertProtoEquals(signature_def, signature_def_compare)
def test_build_standardized_signature_def_classification6(self):
  """Tests multiple output tensors that with integer classes and no scores.

  Servo classification cannot serve integer classes, but no scores are
  available. So, we fall back to predict signature.
  """
  inputs = {
      "input-1": array_ops.placeholder(
          dtypes.string, 1, name="input-tensor-1")
  }
  outputs = {
      "classes": array_ops.placeholder(
          dtypes.int64, 1, name="output-tensor-classes"),
      "logits": array_ops.placeholder(
          dtypes.float32, 1, name="output-tensor-logits"),
  }
  signature_def = saved_model_export_utils.build_standardized_signature_def(
      inputs, outputs, constants.ProblemType.CLASSIFICATION)

  int64_dtype = types_pb2.DataType.Value("DT_INT64")
  float_dtype = types_pb2.DataType.Value("DT_FLOAT")
  string_dtype = types_pb2.DataType.Value("DT_STRING")
  vector_shape = tensor_shape_pb2.TensorShapeProto(
      dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
  # The predict fallback keeps the original aliases as signature keys.
  expected = meta_graph_pb2.SignatureDef(
      method_name=signature_constants.PREDICT_METHOD_NAME)
  expected.inputs["input-1"].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="input-tensor-1:0", dtype=string_dtype,
          tensor_shape=vector_shape))
  expected.outputs["classes"].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-classes:0", dtype=int64_dtype,
          tensor_shape=vector_shape))
  expected.outputs["logits"].CopyFrom(
      meta_graph_pb2.TensorInfo(
          name="output-tensor-logits:0", dtype=float_dtype,
          tensor_shape=vector_shape))
  self.assertEqual(signature_def, expected)
def testSignatureDefValidationFails(self):
  """Runs failing builder validation over malformed TensorInfos."""
  export_dir = self._get_export_dir("test_signature_def_validation_fail")
  builder = saved_model_builder.SavedModelBuilder(export_dir)

  tensor_without_encoding = meta_graph_pb2.TensorInfo()
  tensor_without_encoding.dtype = types_pb2.DT_FLOAT

  tensor_without_dtype = meta_graph_pb2.TensorInfo()
  tensor_without_dtype.name = "x"

  tensor_empty = meta_graph_pb2.TensorInfo()

  # Each malformed TensorInfo must be rejected on both the input and the
  # output side of the signature.
  for bad_tensor_info in (tensor_without_encoding, tensor_without_dtype,
                          tensor_empty):
    self._validate_inputs_tensor_info_fail(builder, bad_tensor_info)
    self._validate_outputs_tensor_info_fail(builder, bad_tensor_info)
def testSignatureDefValidation(self):
  """Runs builder validation over a set of incomplete TensorInfos."""
  export_dir = os.path.join(test.get_temp_dir(),
                            "test_signature_def_validation")
  builder = saved_model_builder.SavedModelBuilder(export_dir)

  tensor_without_name = meta_graph_pb2.TensorInfo()
  tensor_without_name.dtype = types_pb2.DT_FLOAT

  tensor_without_dtype = meta_graph_pb2.TensorInfo()
  tensor_without_dtype.name = "x"

  tensor_empty = meta_graph_pb2.TensorInfo()

  # Validate each TensorInfo on both the input and the output side.
  for tensor_info in (tensor_without_name, tensor_without_dtype,
                      tensor_empty):
    self._validate_inputs_tensor_info(builder, tensor_info)
    self._validate_outputs_tensor_info(builder, tensor_info)
def _build_composite_tensor_info_internal(tensor):
  """Utility function to build TensorInfo proto from a CompositeTensor."""
  spec = tensor._type_spec  # pylint: disable=protected-access
  info = meta_graph_pb2.TensorInfo()
  # Encode the type spec, then record a TensorInfo for every flattened
  # component tensor.
  encoded_spec = nested_structure_coder.encode_structure(spec)
  info.composite_tensor.type_spec.CopyFrom(encoded_spec.type_spec_value)
  components = nest.flatten(tensor, expand_composites=True)
  for component in components:
    component_info = build_tensor_info_internal(component)
    info.composite_tensor.components.add().CopyFrom(component_info)
  return info
def testAddSignatureDefToFlatbufferMetadata(self):
  """Test a SavedModel conversion has correct Metadata."""
  filename = tf.compat.v1.resource_loader.get_path_to_datafile(
      '../../testdata/add.bin')
  if not tf.io.gfile.exists(filename):
    raise IOError('File "{0}" does not exist in {1}.'.format(
        filename,
        tf.compat.v1.resource_loader.get_root_dir_with_all_resources()))
  with tf.io.gfile.GFile(filename, 'rb') as fp:
    tflite_model = bytearray(fp.read())

  self.assertIsNotNone(tflite_model, 'TFLite model is none')

  # Build a predict SignatureDef with one float input 'x' and one float
  # output 'y', both shaped [1, 8, 8, 3].
  sig_input_tensor = meta_graph_pb2.TensorInfo(
      dtype=tf.as_dtype(tf.float32).as_datatype_enum,
      tensor_shape=tf.TensorShape([1, 8, 8, 3]).as_proto())
  sig_input_tensor_signature = {'x': sig_input_tensor}
  sig_output_tensor = meta_graph_pb2.TensorInfo(
      dtype=tf.as_dtype(tf.float32).as_datatype_enum,
      tensor_shape=tf.TensorShape([1, 8, 8, 3]).as_proto())
  sig_output_tensor_signature = {'y': sig_output_tensor}
  predict_signature_def = (tf.compat.v1.saved_model.build_signature_def(
      sig_input_tensor_signature, sig_output_tensor_signature,
      tf.saved_model.PREDICT_METHOD_NAME))
  serving_key = tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY
  signature_def_map = {serving_key: predict_signature_def}

  # Writing the map into the flatbuffer and reading it back must
  # round-trip the SignatureDef byte-for-byte.
  tflite_model = signature_def_utils.set_signature_defs(
      tflite_model, signature_def_map)
  saved_signature_def_map = signature_def_utils.get_signature_defs(
      tflite_model)
  signature_def = saved_signature_def_map.get(serving_key)
  self.assertIsNotNone(signature_def, 'SignatureDef not found')
  self.assertEqual(signature_def.SerializeToString(),
                   predict_signature_def.SerializeToString())

  # Clearing the signature defs must remove the serving entry again.
  remove_tflite_model = (
      signature_def_utils.clear_signature_defs(tflite_model))
  signature_def_map = signature_def_utils.get_signature_defs(
      remove_tflite_model)
  self.assertIsNone(signature_def_map.get(serving_key),
                    'SignatureDef found, but should be missing')
def _add_input_to_signature_def(tensor_name, map_key, signature_def):
  """Add input tensor to signature_def.

  Args:
    tensor_name: string name of tensor to add to signature_def inputs
    map_key: string key to key into signature_def inputs map
    signature_def: object of type meta_graph_pb2.SignatureDef()

  Side effect:
    Adds a TensorInfo with tensor_name to signature_def inputs map keyed
    with map_key.
  """
  tensor_info = meta_graph_pb2.TensorInfo(name=tensor_name)
  signature_def.inputs[map_key].CopyFrom(tensor_info)
def build_tensor_info_internal(tensor):
  """Utility function to build TensorInfo proto from a Tensor."""
  info = meta_graph_pb2.TensorInfo(
      dtype=dtypes.as_dtype(tensor.dtype).as_datatype_enum,
      tensor_shape=tensor.get_shape().as_proto())
  if not isinstance(tensor, sparse_tensor.SparseTensor):
    # Dense tensors are identified by a single tensor name.
    info.name = tensor.name
    return info
  # SparseTensors are identified by their three component tensor names.
  info.coo_sparse.values_tensor_name = tensor.values.name
  info.coo_sparse.indices_tensor_name = tensor.indices.name
  info.coo_sparse.dense_shape_tensor_name = tensor.dense_shape.name
  return info
def testSignatureDefValidationSucceedsWithName(self):
  """A TensorInfo carrying both a name and a dtype passes validation."""
  named_tensor_info = meta_graph_pb2.TensorInfo(
      name="foo", dtype=types_pb2.DT_FLOAT)

  builder = saved_model_builder.SavedModelBuilder(
      self._get_export_dir("test_signature_def_validation_name_1"))
  self._validate_inputs_tensor_info_accept(builder, named_tensor_info)

  builder = saved_model_builder.SavedModelBuilder(
      self._get_export_dir("test_signature_def_validation_name_2"))
  self._validate_outputs_tensor_info_accept(builder, named_tensor_info)
def build_tensor_info(name=None, dtype=None, shape=None):
  """Utility function to build TensorInfo proto.

  Args:
    name: Name of the tensor to be used in the TensorInfo.
    dtype: Datatype to be set in the TensorInfo.
    shape: TensorShapeProto to specify the shape of the tensor in the
        TensorInfo.

  Returns:
    A TensorInfo protocol buffer constructed based on the supplied arguments.
  """
  # Bug fix: TensorInfo has no `shape` field — the shape lives in the
  # `tensor_shape` field, so the original `shape=shape` kwarg raised a
  # ValueError whenever a shape was supplied.
  return meta_graph_pb2.TensorInfo(name=name, dtype=dtype, tensor_shape=shape)
def testSignatureDefValidationSucceedsWithCoo(self):
  """A TensorInfo using the coo_sparse encoding passes validation."""
  sparse_tensor_info = meta_graph_pb2.TensorInfo()
  # TODO(soergel) test validation of each of the fields of coo_sparse
  sparse_tensor_info.coo_sparse.values_tensor_name = "foo"
  sparse_tensor_info.dtype = types_pb2.DT_FLOAT

  builder = saved_model_builder.SavedModelBuilder(
      self._get_export_dir("test_signature_def_validation_coo_1"))
  self._validate_inputs_tensor_info_accept(builder, sparse_tensor_info)

  builder = saved_model_builder.SavedModelBuilder(
      self._get_export_dir("test_signature_def_validation_coo_2"))
  self._validate_outputs_tensor_info_accept(builder, sparse_tensor_info)