def test_build_standardized_signature_def_classification2(self):
    """Tests multiple output tensors that include classes and probabilities."""
    input_tensors = {
        "input-1":
            array_ops.placeholder(dtypes.string, 1, name="input-tensor-1")
    }
    output_tensors = {
        "classes":
            array_ops.placeholder(dtypes.string, 1,
                                  name="output-tensor-classes"),
        # Will be used for CLASSIFY_OUTPUT_SCORES.
        "probabilities":
            array_ops.placeholder(dtypes.float32, 1,
                                  name="output-tensor-proba"),
        "logits":
            array_ops.placeholder(dtypes.float32, 1,
                                  name="output-tensor-logits-unused"),
    }
    problem_type = constants.ProblemType.CLASSIFICATION
    actual_signature_def = (
        saved_model_export_utils.build_standardized_signature_def(
            input_tensors, output_tensors, problem_type))

    # Build the expected SignatureDef by hand for comparison.
    shape = tensor_shape_pb2.TensorShapeProto(
        dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
    dtype_float = types_pb2.DataType.Value("DT_FLOAT")
    dtype_string = types_pb2.DataType.Value("DT_STRING")

    def tensor_info(tensor_name, dtype):
        # Every expected entry shares the same [1] shape.
        return meta_graph_pb2.TensorInfo(
            name=tensor_name, dtype=dtype, tensor_shape=shape)

    expected_signature_def = meta_graph_pb2.SignatureDef()
    expected_signature_def.inputs[
        signature_constants.CLASSIFY_INPUTS].CopyFrom(
            tensor_info("input-tensor-1:0", dtype_string))
    expected_signature_def.outputs[
        signature_constants.CLASSIFY_OUTPUT_CLASSES].CopyFrom(
            tensor_info("output-tensor-classes:0", dtype_string))
    expected_signature_def.outputs[
        signature_constants.CLASSIFY_OUTPUT_SCORES].CopyFrom(
            tensor_info("output-tensor-proba:0", dtype_float))
    expected_signature_def.method_name = (
        signature_constants.CLASSIFY_METHOD_NAME)
    self.assertEqual(actual_signature_def, expected_signature_def)
def _build_signature(graph, input_map, output_map):
    """Return a SignatureDef built from alias -> tensor-name maps.

    Args:
      graph: the Graph used to resolve tensor names into Tensor objects.
      input_map: dict mapping input alias to tensor name.
      output_map: dict mapping output alias to tensor name.

    Returns:
      A meta_graph_pb2.SignatureDef whose inputs/outputs carry the name,
      dtype, and shape of each referenced tensor.
    """
    # Function for creating TensorInfo structures from tensor names.
    def get_tensor_info(tensor_name):
        tensor = graph.get_tensor_by_name(tensor_name)
        return meta_graph_pb2.TensorInfo(
            name=tensor_name,
            dtype=tensor.dtype.as_datatype_enum,
            tensor_shape=tensor.get_shape().as_proto(),)

    # Bug fix: dict.iteritems() was removed in Python 3 (AttributeError);
    # dict.items() behaves identically here on both Python 2 and 3.
    inputs = {alias: get_tensor_info(tensor_name)
              for alias, tensor_name in input_map.items()}
    outputs = {alias: get_tensor_info(tensor_name)
               for alias, tensor_name in output_map.items()}
    return meta_graph_pb2.SignatureDef(inputs=inputs, outputs=outputs)
def testExtractSignatureOutputsWithPrefix(self):
    """Checks prefix-based grouping of SignatureDef output entries."""
    signature_def = meta_graph_pb2.SignatureDef()

    def make_tensor_info(name):
        return tf.saved_model.utils.build_tensor_info(
            tf.constant(0.0, name=name))

    # Populate the outputs map data-driven: (output key, tensor name).
    output_specs = [
        ('predictions', 'predictions'),
        ('metrics/mean/value', 'mean_value'),
        ('metrics/mean/update_op', 'mean_update'),
        # This case is to check that things like
        # predictions/predictions are okay.
        ('prefix', 'prefix'),
        ('prefix1', 'prefix1'),
        ('prefix2', 'prefix2'),
        ('prefix/stuff', 'prefix/stuff'),
        ('prefix/sub/more', 'prefix/sub/more'),
    ]
    for key, tensor_name in output_specs:
        signature_def.outputs[key].CopyFrom(make_tensor_info(tensor_name))

    # Exact-match prefix returns the single entry unchanged.
    self.assertDictEqual(
        {'predictions': signature_def.outputs['predictions']},
        graph_ref.extract_signature_outputs_with_prefix(
            'predictions', signature_def.outputs))
    # Nested keys under the prefix are returned with the prefix stripped.
    self.assertDictEqual(
        {
            'mean/value': signature_def.outputs['metrics/mean/value'],
            'mean/update_op': signature_def.outputs['metrics/mean/update_op']
        },
        graph_ref.extract_signature_outputs_with_prefix(
            'metrics', signature_def.outputs))
    # Keys that merely start with the prefix text are kept as-is.
    self.assertDictEqual(
        {
            'prefix': signature_def.outputs['prefix'],
            'prefix1': signature_def.outputs['prefix1'],
            'prefix2': signature_def.outputs['prefix2'],
            'stuff': signature_def.outputs['prefix/stuff'],
            'sub/more': signature_def.outputs['prefix/sub/more'],
        },
        graph_ref.extract_signature_outputs_with_prefix(
            'prefix', signature_def.outputs))
def testFusePreluWithConv2d(self):
    """Verifies a Conv2D+BiasAdd followed by PReLU fuses into one _FusedConv2D."""
    layers = [
        tf.keras.layers.Conv2D(
            16, [3, 3], padding='same', use_bias=True),
        tf.keras.layers.PReLU()
    ]
    model = tf.keras.Sequential(layers)
    tf.keras.backend.set_learning_phase(0)
    input_tensor = tf.constant([1.0, 1.0], shape=[1, 2, 1, 1])

    @tf.function
    def execute_model(tensor):
        return model(tensor)

    graph = execute_model.get_concrete_function(
        input_tensor).graph
    graph_def = graph.as_graph_def()
    # Pin Conv2D nodes to CPU so grappler's remapper will consider them
    # for BiasAdd fusion.
    for node in graph_def.node:
        if node.op == 'Conv2D':
            node.device = "/CPU:0"

    config = config_pb2.ConfigProto()
    rewriter_config = config.graph_options.rewrite_options
    # Explicit grappler pass list; 'remap' performs the Conv2D+BiasAdd
    # fusion this test depends on. The pass order is intentional.
    rewriter_config.optimizers[:] = [
        'pruning', 'constfold', 'arithmetic', 'dependency', 'pruning',
        'remap', 'constfold', 'arithmetic', 'dependency'
    ]
    # Register the fetch as 'train_op' so grappler knows the graph outputs.
    for output in ['Identity']:
        graph.add_to_collection('train_op',
                                graph.get_operation_by_name(output))

    signature = meta_graph_pb2.SignatureDef()
    graph_def = tf_saved_model_conversion_v2._run_grappler(
        config, graph_def, graph, signature)
    # First rewrite the raw op pattern into a Prelu node, then fold that
    # Prelu into the already-fused conv.
    graph_def = fuse_prelu.fuse_ops_for_prelu(graph_def)
    optimized_graph_def = fuse_prelu.fuse_prelu_with_fused_conv2d(graph_def)

    conv2d_op = None
    for node in optimized_graph_def.node:
        # No standalone Prelu nodes may survive the fusion.
        self.assertNotEqual("Prelu", node.op)
        if node.op == '_FusedConv2D':
            conv2d_op = node
    self.assertNotEqual(conv2d_op, None)
    # The fused node must carry both post-ops and the two extra inputs
    # (bias and the PReLU alpha).
    self.assertEqual(conv2d_op.attr['fused_ops'].list.s,
                     [b'BiasAdd', b'Prelu'])
    self.assertEqual(conv2d_op.attr['num_args'].i, 2)
def test_build_standardized_signature_def_classification5(self):
    """Tests multiple output tensors that include integer classes and scores.

    Integer classes are dropped out, because Servo classification can only
    serve string classes. So, only scores are present in the signature.
    """
    input_tensors = {
        "input-1":
            array_ops.placeholder(dtypes.string, 1, name="input-tensor-1")
    }
    output_tensors = {
        "classes":
            array_ops.placeholder(dtypes.int64, 1,
                                  name="output-tensor-classes"),
        "scores":
            array_ops.placeholder(dtypes.float32, 1,
                                  name="output-tensor-scores"),
        "logits":
            array_ops.placeholder(dtypes.float32, 1,
                                  name="output-tensor-logits-unused"),
    }
    actual_signature_def = (
        saved_model_export_utils.build_standardized_signature_def(
            input_tensors, output_tensors,
            constants.ProblemType.CLASSIFICATION))

    # Expected signature: the int64 classes are absent; only scores remain.
    single_dim = tensor_shape_pb2.TensorShapeProto(
        dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
    expected_signature_def = meta_graph_pb2.SignatureDef()
    expected_signature_def.method_name = (
        signature_constants.CLASSIFY_METHOD_NAME)
    expected_signature_def.inputs[
        signature_constants.CLASSIFY_INPUTS].CopyFrom(
            meta_graph_pb2.TensorInfo(
                name="input-tensor-1:0",
                dtype=types_pb2.DataType.Value("DT_STRING"),
                tensor_shape=single_dim))
    expected_signature_def.outputs[
        signature_constants.CLASSIFY_OUTPUT_SCORES].CopyFrom(
            meta_graph_pb2.TensorInfo(
                name="output-tensor-scores:0",
                dtype=types_pb2.DataType.Value("DT_FLOAT"),
                tensor_shape=single_dim))
    self.assertEqual(actual_signature_def, expected_signature_def)
def _convert_default_signature_to_signature_def(signatures):
    """Convert default signature to object of type SignatureDef.

    Args:
      signatures: object of type manifest_pb2.Signatures()

    Returns:
      object of type SignatureDef which contains a converted version of default
      signature from input signatures object

      Returns None if signature is of generic type because it cannot be
      converted to SignatureDef.
    """
    default_signature = signatures.default_signature
    signature_kind = default_signature.WhichOneof("type")
    signature_def = meta_graph_pb2.SignatureDef()

    if signature_kind == legacy_constants.REGRESSION_SIGNATURE:
        regression = default_signature.regression_signature
        signature_def.method_name = signature_constants.REGRESS_METHOD_NAME
        _add_input_to_signature_def(regression.input.tensor_name,
                                    signature_constants.REGRESS_INPUTS,
                                    signature_def)
        _add_output_to_signature_def(regression.output.tensor_name,
                                     signature_constants.REGRESS_OUTPUTS,
                                     signature_def)
        return signature_def

    if signature_kind == legacy_constants.CLASSIFICATION_SIGNATURE:
        classification = default_signature.classification_signature
        signature_def.method_name = signature_constants.CLASSIFY_METHOD_NAME
        _add_input_to_signature_def(classification.input.tensor_name,
                                    signature_constants.CLASSIFY_INPUTS,
                                    signature_def)
        _add_output_to_signature_def(
            classification.classes.tensor_name,
            signature_constants.CLASSIFY_OUTPUT_CLASSES, signature_def)
        _add_output_to_signature_def(
            classification.scores.tensor_name,
            signature_constants.CLASSIFY_OUTPUT_SCORES, signature_def)
        return signature_def

    # Any other signature kind (e.g. generic) cannot be up-converted.
    logging.error(
        "Only classification and regression default signatures "
        "are supported for up-conversion. %s is not "
        "supported", signature_kind)
    return None
def test_build_standardized_signature_def_classification6(self):
    """Tests multiple output tensors that with integer classes and no scores.

    Servo classification cannot serve integer classes, but no scores are
    available. So, we fall back to predict signature.
    """
    input_tensors = {
        "input-1":
            array_ops.placeholder(dtypes.string, 1, name="input-tensor-1")
    }
    output_tensors = {
        "classes":
            array_ops.placeholder(dtypes.int64, 1,
                                  name="output-tensor-classes"),
        "logits":
            array_ops.placeholder(dtypes.float32, 1,
                                  name="output-tensor-logits"),
    }
    actual_signature_def = (
        saved_model_export_utils.build_standardized_signature_def(
            input_tensors, output_tensors,
            constants.ProblemType.CLASSIFICATION))

    shape = tensor_shape_pb2.TensorShapeProto(
        dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])

    def tensor_info(tensor_name, type_name):
        # Expected TensorInfo with the shared [1] shape.
        return meta_graph_pb2.TensorInfo(
            name=tensor_name,
            dtype=types_pb2.DataType.Value(type_name),
            tensor_shape=shape)

    # Predict signature keeps the original aliases verbatim.
    expected_signature_def = meta_graph_pb2.SignatureDef()
    expected_signature_def.inputs["input-1"].CopyFrom(
        tensor_info("input-tensor-1:0", "DT_STRING"))
    expected_signature_def.outputs["classes"].CopyFrom(
        tensor_info("output-tensor-classes:0", "DT_INT64"))
    expected_signature_def.outputs["logits"].CopyFrom(
        tensor_info("output-tensor-logits:0", "DT_FLOAT"))
    expected_signature_def.method_name = (
        signature_constants.PREDICT_METHOD_NAME)
    self.assertEqual(actual_signature_def, expected_signature_def)
def test_build_standardized_signature_def_classify_both(self):
    """Tests multiple output tensors that include classes and scores."""
    with context.graph_mode():
        input_tensors = {
            'input-1':
                array_ops.placeholder(
                    dtypes.string, 1, name='input-tensor-1')
        }
        classes = array_ops.placeholder(
            dtypes.string, 1, name='output-tensor-classes')
        scores = array_ops.placeholder(
            dtypes.float32, 1, name='output-tensor-scores')

        export_output = export_output_lib.ClassificationOutput(
            scores=scores, classes=classes)
        actual_signature_def = export_output.as_signature_def(input_tensors)

        shape = tensor_shape_pb2.TensorShapeProto(
            dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])

        def tensor_info(tensor_name, type_name):
            # Expected TensorInfo with the shared [1] shape.
            return meta_graph_pb2.TensorInfo(
                name=tensor_name,
                dtype=types_pb2.DataType.Value(type_name),
                tensor_shape=shape)

        expected_signature_def = meta_graph_pb2.SignatureDef()
        expected_signature_def.inputs[
            signature_constants.CLASSIFY_INPUTS].CopyFrom(
                tensor_info('input-tensor-1:0', 'DT_STRING'))
        expected_signature_def.outputs[
            signature_constants.CLASSIFY_OUTPUT_CLASSES].CopyFrom(
                tensor_info('output-tensor-classes:0', 'DT_STRING'))
        expected_signature_def.outputs[
            signature_constants.CLASSIFY_OUTPUT_SCORES].CopyFrom(
                tensor_info('output-tensor-scores:0', 'DT_FLOAT'))
        expected_signature_def.method_name = (
            signature_constants.CLASSIFY_METHOD_NAME)
        self.assertEqual(actual_signature_def, expected_signature_def)
def run_graph_optimizations(graph_def, input_arrays, output_arrays, config,
                            graph=None):
    """Apply standard TensorFlow optimizations to the graph_def.

    Args:
      graph_def: Frozen GraphDef to be optimized.
      input_arrays: List of arrays that are considered inputs of the graph.
      output_arrays: List of arrays that are considered outputs of the graph.
      config: tf.ConfigProto.
      graph: TensorFlow Graph. Required when Eager mode is enabled.
        (default None)

    Returns:
      A new, optimized GraphDef.
    """
    meta_graph = _export_meta_graph(graph_def=graph_def, graph=graph)

    signature = _meta_graph_pb2.SignatureDef()

    def fill_tensor_info(tensor_info_map, arrays):
        # Record name, dtype, and shape for each array in the given map.
        for array in arrays:
            info = tensor_info_map[array.name]
            info.name = array.name
            info.dtype = array.dtype.as_datatype_enum
            info.tensor_shape.CopyFrom(array.shape.as_proto())

    fill_tensor_info(signature.inputs, input_arrays)
    fill_tensor_info(signature.outputs, output_arrays)
    meta_graph.signature_def["not_used_key"].CopyFrom(signature)

    # We need to add a collection called 'train_op' so that grappler
    # knows what the outputs are.
    fetch_collection = _meta_graph_pb2.CollectionDef()
    fetch_collection.node_list.value.extend(
        array.name for array in input_arrays + output_arrays)
    meta_graph.collection_def["train_op"].CopyFrom(fetch_collection)

    return tf_optimizer.OptimizeGraph(config, meta_graph)
def _convert_default_signature_to_signature_def(signatures):
    """Convert default signature to object of type SignatureDef.

    Args:
      signatures: object of type manifest_pb2.Signatures()

    Returns:
      object of type SignatureDef which contains a converted version of default
      signature from input signatures object

    Raises:
      RuntimeError: if default signature type is not classification or
      regression.
    """
    default_signature = signatures.default_signature
    signature_kind = default_signature.WhichOneof("type")
    signature_def = meta_graph_pb2.SignatureDef()

    if signature_kind == "regression_signature":
        regression = default_signature.regression_signature
        signature_def.method_name = signature_constants.REGRESS_METHOD_NAME
        _add_input_to_signature_def(regression.input.tensor_name,
                                    signature_constants.REGRESS_INPUTS,
                                    signature_def)
        _add_output_to_signature_def(regression.output.tensor_name,
                                     signature_constants.REGRESS_OUTPUTS,
                                     signature_def)
    elif signature_kind == "classification_signature":
        classification = default_signature.classification_signature
        signature_def.method_name = signature_constants.CLASSIFY_METHOD_NAME
        _add_input_to_signature_def(classification.input.tensor_name,
                                    signature_constants.CLASSIFY_INPUTS,
                                    signature_def)
        _add_output_to_signature_def(
            classification.classes.tensor_name,
            signature_constants.CLASSIFY_OUTPUT_CLASSES, signature_def)
        _add_output_to_signature_def(
            classification.scores.tensor_name,
            signature_constants.CLASSIFY_OUTPUT_SCORES, signature_def)
    else:
        # Any other signature kind (e.g. generic) cannot be up-converted.
        raise RuntimeError(
            "Only classification and regression default signatures "
            "are supported for up-conversion. %s is not "
            "supported" % signature_kind)
    return signature_def
def _prune_removed_feed_nodes(signature_def, graph_def):
    """Identify the inputs in the signature no longer in graph_def, prune them.

    Args:
      signature_def: A `SignatureDef` instance.
      graph_def: A `GraphDef` instance.

    Returns:
      A new pruned `SignatureDef`.
    """
    node_names = {n.name for n in graph_def.node}
    # Work on a copy so the caller's SignatureDef is left untouched.
    new_signature_def = meta_graph_pb2.SignatureDef()
    new_signature_def.CopyFrom(signature_def)
    for key, tensor_info in signature_def.inputs.items():
        tensor_name, _ = _parse_tensor_name(tensor_info.name)
        if tensor_name in node_names:
            continue
        logging.warn(
            'Signature input key \'{}\', tensor name \'{}\', has been pruned '
            'while freezing the graph. Removing it from the compiled signatures.'
            .format(key, tensor_name))
        del new_signature_def.inputs[key]
    return new_signature_def
def _convert_named_signatures_to_signature_def(signatures):
    """Convert named signatures to object of type SignatureDef.

    Args:
      signatures: object of type manifest_pb2.Signatures()

    Returns:
      object of type SignatureDef which contains a converted version of named
      signatures from input signatures object

    Raises:
      RuntimeError: if input and output named signatures are not of type
      GenericSignature
    """
    input_signature = signatures.named_signatures[
        signature_constants.PREDICT_INPUTS]
    output_signature = signatures.named_signatures[
        signature_constants.PREDICT_OUTPUTS]
    # TODO(pdudnik): what if there are other signatures? Mimic cr/140900781 once
    # it is submitted.
    input_kind = input_signature.WhichOneof("type")
    output_kind = output_signature.WhichOneof("type")
    if (input_kind != legacy_constants.GENERIC_SIGNATURE or
            output_kind != legacy_constants.GENERIC_SIGNATURE):
        raise RuntimeError("Named input and output signatures can only be "
                           "up-converted if they are generic signature. "
                           "Input signature type is %s, output signature type is "
                           "%s" % (input_kind, output_kind))

    signature_def = meta_graph_pb2.SignatureDef()
    signature_def.method_name = signature_constants.PREDICT_METHOD_NAME
    for alias, binding in input_signature.generic_signature.map.items():
        _add_input_to_signature_def(binding.tensor_name, alias, signature_def)
    for alias, binding in output_signature.generic_signature.map.items():
        _add_output_to_signature_def(binding.tensor_name, alias, signature_def)
    return signature_def
def _build_signature_def(frozen_graph, input_nodes, output_nodes,
                         signature_def=None):
    """Build a SignatureDef describing the inputs/outputs of a frozen graph.

    Args:
      frozen_graph: the frozen tf.Graph used to look up input operations.
      input_nodes: list of input tensors.
      output_nodes: list of output tensors, or plain tensor-name strings.
      signature_def: optional SignatureDef whose aliases should be reused
        for the entries (via _find_signature_def_name). (default None)

    Returns:
      A meta_graph_pb2.SignatureDef with one entry per surviving input and
      one per output.
    """
    signature = meta_graph_pb2.SignatureDef()
    for input_tensor in input_nodes:
        op_name = input_tensor.name.split(':')[0]
        # The graph freezing may turn the original inputs into constants, or
        # remove them from the graph, so we need to ignore those.
        try:
            op = frozen_graph.get_operation_by_name(op_name)
            if op.type != 'Const':
                name = input_tensor.name
                if hasattr(signature_def, 'inputs'):
                    name = _find_signature_def_name(input_tensor,
                                                    signature_def.inputs)
                signature.inputs[name].name = input_tensor.name
                signature.inputs[
                    name].dtype = input_tensor.dtype.as_datatype_enum
                signature.inputs[name].tensor_shape.CopyFrom(
                    input_tensor.shape.as_proto())
        except KeyError:
            # The original input was removed when the graph was frozen.
            continue
    for output_tensor in output_nodes:
        if hasattr(output_tensor, 'name'):
            name = output_tensor.name
            # Bug fix: guard the access to signature_def.outputs with
            # hasattr(..., 'outputs'); the original checked 'inputs' here,
            # which was inconsistent with the attribute actually read.
            if hasattr(signature_def, 'outputs'):
                name = _find_signature_def_name(output_tensor,
                                                signature_def.outputs)
            signature.outputs[name].name = output_tensor.name
            signature.outputs[
                name].dtype = output_tensor.dtype.as_datatype_enum
            signature.outputs[name].tensor_shape.CopyFrom(
                output_tensor.shape.as_proto())
        else:
            # Just the tensor name string array.
            signature.outputs[output_tensor].name = output_tensor
    return signature
def build_signature_def(inputs=None, outputs=None, method_name=None):
    """Utility function to build a SignatureDef protocol buffer.

    Args:
      inputs: Inputs of the SignatureDef defined as a proto map of string to
          tensor info.
      outputs: Outputs of the SignatureDef defined as a proto map of string to
          tensor info.
      method_name: Method name of the SignatureDef as a string.

    Returns:
      A SignatureDef protocol buffer constructed based on the supplied
      arguments.
    """
    signature_def = meta_graph_pb2.SignatureDef()
    if inputs is not None:
        for alias, tensor_info in inputs.items():
            signature_def.inputs[alias].CopyFrom(tensor_info)
    if outputs is not None:
        for alias, tensor_info in outputs.items():
            signature_def.outputs[alias].CopyFrom(tensor_info)
    if method_name is not None:
        signature_def.method_name = method_name
    return signature_def
def _make_signature_def(payload: Dict[str, Any]):
    """Parse a JSON-style dict into a SignatureDef protocol buffer."""
    signature_def = meta_graph_pb2.SignatureDef()
    json_format.ParseDict(payload, signature_def)
    return signature_def
def _deserialize(serialized):
    """Reconstruct a SignatureDef proto from its serialized byte string."""
    result = meta_graph_pb2.SignatureDef()
    result.ParseFromString(serialized)
    return result
def testExtractSignatureOutputsWithPrefix(self):
    """Checks prefix-based grouping of SignatureDef inputs and outputs."""
    signature_def = meta_graph_pb2.SignatureDef()
    with tf.Graph().as_default():  # Needed to disable eager mode.

        def make_tensor_info(name):
            return tf.compat.v1.saved_model.utils.build_tensor_info(
                tf.constant(0.0, name=name))

        # Test for single entry (non-dict) tensors.
        signature_def.inputs['labels'].CopyFrom(make_tensor_info('labels'))

        # Populate the outputs map data-driven: (output key, tensor name).
        output_specs = [
            ('predictions', 'predictions'),
            ('metrics/mean/value', 'mean_value'),
            ('metrics/mean/update_op', 'mean_update'),
            # This case is to check that things like
            # predictions/predictions are okay.
            ('prefix', 'prefix'),
            ('prefix1', 'prefix1'),
            ('prefix2', 'prefix2'),
            ('prefix/stuff', 'prefix/stuff'),
            ('prefix/sub/more', 'prefix/sub/more'),
        ]
        for key, tensor_name in output_specs:
            signature_def.outputs[key].CopyFrom(
                make_tensor_info(tensor_name))

        # An explicit new-key argument renames the single matched entry.
        self.assertDictEqual(
            {'__labels': signature_def.inputs['labels']},
            graph_ref.extract_signature_inputs_or_outputs_with_prefix(
                'labels', signature_def.inputs, '__labels'))
        # Exact-match prefix returns the single entry unchanged.
        self.assertDictEqual(
            {'predictions': signature_def.outputs['predictions']},
            graph_ref.extract_signature_inputs_or_outputs_with_prefix(
                'predictions', signature_def.outputs))
        # Nested keys under the prefix come back with the prefix stripped.
        self.assertDictEqual(
            {
                'mean/value': signature_def.outputs['metrics/mean/value'],
                'mean/update_op':
                    signature_def.outputs['metrics/mean/update_op']
            },
            graph_ref.extract_signature_inputs_or_outputs_with_prefix(
                'metrics', signature_def.outputs))
        # Keys that merely start with the prefix text are kept as-is.
        self.assertDictEqual(
            {
                'prefix': signature_def.outputs['prefix'],
                'prefix1': signature_def.outputs['prefix1'],
                'prefix2': signature_def.outputs['prefix2'],
                'stuff': signature_def.outputs['prefix/stuff'],
                'sub/more': signature_def.outputs['prefix/sub/more'],
            },
            graph_ref.extract_signature_inputs_or_outputs_with_prefix(
                'prefix', signature_def.outputs))