# CHECK-SAME: then_branch = @"key/[[then:[a-zA-Z_0-9]+]]" # CHECK: func private @"key/[[else]]"( # CHECK: func private @"key/[[then]]"( def Test(): data = tf.constant([1, 2, 3, 4, 5, 6]) # Create placeholders to prevent constant folding. x_op = tf.placeholder(dtype=tf.int32) y_op = tf.placeholder(dtype=tf.int32) less_op = tf.less(x_op, y_op) switch_op = control_flow_ops.switch(data, less_op) merge_op = control_flow_ops.merge(switch_op)[0] result = tf.transpose(merge_op) tensor_info_result = tf.compat.v1.saved_model.utils.build_tensor_info( result) signature_def = tf.saved_model.signature_def_utils.build_signature_def( inputs=None, outputs={'result': tensor_info_result}, method_name='some_function') return {'key': signature_def}, None, None if __name__ == '__main__': common_v1.set_tf_options() common_v1.do_test(Test)
table_initializer = tf.lookup.TextFileInitializer( vocabulary_file, tf.string, tf.lookup.TextFileIndex.WHOLE_LINE, tf.int64, tf.lookup.TextFileIndex.LINE_NUMBER) # Incur another bound_input on the asset, but with a different sym_name, i.e., # __tf_saved_model_asset1_tokens.txt vs. __tf_saved_model_asset0_tokens.txt. table = tf.lookup.StaticVocabularyTable(table_initializer, num_oov_buckets=10) vocab_file_tensor = tf.convert_to_tensor(vocabulary_file, tf.string, name='asset_filepath') tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, vocab_file_tensor) x = tf.placeholder(tf.string, shape=(), name='input') r = table.lookup(x) tensor_info_x = tf.compat.v1.saved_model.utils.build_tensor_info(x) tensor_info_r = tf.compat.v1.saved_model.utils.build_tensor_info(r) return { 'key': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def( inputs={'x': tensor_info_x}, outputs={'r': tensor_info_r}, method_name='some_function')) }, tf.tables_initializer(), tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS) if __name__ == '__main__': common_v1.set_tf_options() common_v1.do_test(test)
# CHECK-NEXT: [[R0:%.*]] = "tf.ReadVariableOp"([[ARG1]]) {{{.*}}} : (tensor<!tf_type.resource<tensor<1x3xf32>>>) -> tensor<1x3xf32> # CHECK-NEXT: [[R1:%.*]] = "tf.MatMul"([[ARG0]], [[R0]]) {{{.*}}} : (tensor<3x1xf32>, tensor<1x3xf32>) -> tensor<3x3xf32> # CHECK-NEXT: return [[R1]] : tensor<3x3xf32> def Test(): x = tf.constant([[1.0], [1.0], [1.0]]) y = tf.compat.v1.get_variable(name='y', shape=(1, 3), initializer=tf.random_normal_initializer(), trainable=True) r = tf.matmul(x, y) tensor_info_x = tf.compat.v1.saved_model.utils.build_tensor_info(x) tensor_info_r = tf.compat.v1.saved_model.utils.build_tensor_info(r) return { 'key': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def( inputs={'x': tensor_info_x}, outputs={'r': tensor_info_r}, method_name='some_function')) }, tf.initializers.global_variables(), None if __name__ == '__main__': common_v1.set_tf_options() common_v1.do_test(Test, canonicalize=True)
table_initializer = tf.lookup.TextFileInitializer( vocabulary_file, tf.string, tf.lookup.TextFileIndex.WHOLE_LINE, tf.int64, tf.lookup.TextFileIndex.LINE_NUMBER) # Incur another bound_input on the asset, but with a different sym_name, i.e., # __tf_saved_model_asset1_tokens.txt vs. __tf_saved_model_asset0_tokens.txt. table = tf.lookup.StaticVocabularyTable(table_initializer, num_oov_buckets=10) vocab_file_tensor = tf.convert_to_tensor(vocabulary_file, tf.string, name='asset_filepath') tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, vocab_file_tensor) x = tf.placeholder(tf.string, shape=(), name='input') r = table.lookup(x) tensor_info_x = tf.compat.v1.saved_model.utils.build_tensor_info(x) tensor_info_r = tf.compat.v1.saved_model.utils.build_tensor_info(r) return { 'key': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def( inputs={'x': tensor_info_x}, outputs={'r': tensor_info_r}, method_name='some_function')) }, tf.tables_initializer(), tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS) if __name__ == '__main__': common_v1.set_tf_options() common_v1.do_test(test, use_lite=True)
# NOTE(review): chunk begins mid-function — the enclosing `def test_defun():`
# header (name inferred from the do_test(test_defun) call at the bottom) and
# the `plus` function referenced below lie above this chunk.  Newlines were
# lost in extraction; formatting is reconstructed with every code token
# unchanged.
  x = tf.constant([[1.0], [1.0], [1.0]])
  y = tf.constant([[2.0], [2.0], [2.0]])

  # Verify that the function defined using function.Defun
  # has a corresponding tf.LegacyCall op.
  # CHECK: func {{@[a-zA-Z_0-9]+}}(
  # CHECK-SAME: [[ARG0:%.*]]: tensor<3x1xf32> {tf_saved_model.index_path = ["y"]},
  # CHECK-SAME: [[ARG1:%.*]]: tensor<3x1xf32> {tf_saved_model.index_path = ["x"]}
  #
  # CHECK-NEXT: [[R0:%.*]] = "tf.LegacyCall"([[ARG1]], [[ARG0]])
  z = plus(x, y)

  tensor_info_x = tf.compat.v1.saved_model.utils.build_tensor_info(x)
  tensor_info_y = tf.compat.v1.saved_model.utils.build_tensor_info(y)
  tensor_info_z = tf.compat.v1.saved_model.utils.build_tensor_info(z)

  # Returns (signature_def_map, init_op, asset_collection); no initializer or
  # assets are needed for this graph.
  return {
      'key': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def(
          inputs={
              'x': tensor_info_x,
              'y': tensor_info_y
          },
          outputs={'z': tensor_info_z},
          method_name='test_function'))
  }, None, None


if __name__ == '__main__':
  common_v1.set_tf_options()
  common_v1.do_test(test_defun)
# NOTE(review): chunk begins mid-expression — the call these keyword arguments
# complete (judging by the later use of `z` in tf.add, presumably
# `z = tf.compat.v1.get_variable(name=..., `) lies above this chunk, as does
# the enclosing `def Test():` header.  Newlines were lost in extraction;
# formatting is reconstructed with every code token unchanged.
      shape=(),
      initializer=tf.random_normal_initializer(),
      trainable=True)

  # Static table mapping int32 keys 1..4 to float32 values 5..8; lookups of
  # unknown keys yield the 0.0 default.
  table_initializer = tf.lookup.KeyValueTensorInitializer(
      keys=[1, 2, 3, 4],
      values=[5, 6, 7, 8],
      key_dtype=tf.int32,
      value_dtype=tf.float32)
  table = tf.lookup.StaticHashTable(
      table_initializer, default_value=tf.constant(0.0))

  x = tf.placeholder(tf.int32, shape=(), name='input')
  y = table.lookup(x)
  r = tf.add(y, z)

  tensor_info_x = tf.compat.v1.saved_model.utils.build_tensor_info(x)
  tensor_info_r = tf.compat.v1.saved_model.utils.build_tensor_info(r)

  # NOTE(review): unlike sibling tests that return a 3-tuple, only the
  # signature_def map is returned here, and do_test below is handed the
  # *result* of Test() plus a separate tables initializer — confirm this
  # matches the common_v1.do_test overload in use.
  return {
      'key': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def(
          inputs={'x': tensor_info_x},
          outputs={'r': tensor_info_r},
          method_name='some_function'))
  }


if __name__ == '__main__':
  common_v1.set_tf_options()
  common_v1.do_test(Test(), tf.tables_initializer())
# itself is verified in the common graphdef converter, so here just assert # it is being invoked. # CHECK: module # CHECK-NOT: tf_saved_model.global_tensor def Test(): x = tf.constant([[1.0], [1.0], [1.0]]) y = tf.compat.v1.get_variable(name='y', shape=(1, 3), initializer=tf.random_normal_initializer(), trainable=True) r = tf.matmul(x, y) tensor_info_x = tf.compat.v1.saved_model.utils.build_tensor_info(x) tensor_info_r = tf.compat.v1.saved_model.utils.build_tensor_info(r) return { 'key': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def( inputs={'x': tensor_info_x}, outputs={'r': tensor_info_r}, method_name='some_function')) }, None, None if __name__ == '__main__': common_v1.set_tf_options() common_v1.do_test(Test, lift_variables=False)
# CHECK-NEXT: [[R0:%.*]] = "tf.ReadVariableOp"([[ARG1]]) {{{.*}}} : (tensor<!tf.resource<tensor<1x3xf32>>>) -> tensor<1x3xf32> # CHECK-NEXT: [[R1:%.*]] = "tf.MatMul"([[ARG0]], [[R0]]) {{{.*}}} : (tensor<3x1xf32>, tensor<1x3xf32>) -> tensor<3x3xf32> # CHECK-NEXT: return [[R1]] : tensor<3x3xf32> def Test(): x = tf.constant([[1.0], [1.0], [1.0]]) y = tf.compat.v1.get_variable( name='y', shape=(1, 3), initializer=tf.random_normal_initializer(), trainable=True) r = tf.matmul(x, y) tensor_info_x = tf.compat.v1.saved_model.utils.build_tensor_info(x) tensor_info_r = tf.compat.v1.saved_model.utils.build_tensor_info(r) return { 'key': (tf.compat.v1.saved_model.signature_def_utils.build_signature_def( inputs={'x': tensor_info_x}, outputs={'r': tensor_info_r}, method_name='some_function')) } if __name__ == '__main__': common_v1.set_tf_options() common_v1.do_test( Test(), tf.initializers.global_variables(), canonicalize=True)