def test_parse_from_sequence_example_missing_feature_list(self):
  """A feature list absent from the proto still yields a padded tensor."""
  with tf.Graph().as_default():
    # The proto carries only "utility2"; "utility" is entirely missing.
    proto_with_one_list = text_format.Parse(
        """
        feature_lists {
          feature_list {
            key: "utility2"
            value {
              feature { float_list { value: 0.0 } }
            }
          }
        }
        """, tf.train.SequenceExample())
    serialized = tf.convert_to_tensor(
        value=[proto_with_one_list.SerializeToString()])
    # Request only the missing "utility" list with an explicit list_size.
    padded = data_lib.parse_from_sequence_example(
        serialized,
        list_size=2,
        context_feature_spec=None,
        example_feature_spec={"utility": EXAMPLE_FEATURE_SPEC["utility"]})
    # Request both lists without list_size; the size is then inferred.
    inferred = data_lib.parse_from_sequence_example(
        serialized,
        context_feature_spec=None,
        example_feature_spec={
            "utility": EXAMPLE_FEATURE_SPEC["utility"],
            "utility2": EXAMPLE_FEATURE_SPEC["utility"],
        })
    with tf.compat.v1.Session() as sess:
      sess.run(tf.compat.v1.local_variables_initializer())
      padded_map, inferred_map = sess.run([padded, inferred])
      self.assertAllEqual([1, 2, 1], padded_map["utility"].shape)
      self.assertAllEqual([1, 1, 1], inferred_map["utility"].shape)
def test_parse_from_sequence_example_missing_frame_exception(self):
  """An empty frame inside a feature list raises InvalidArgumentError.

  The "utility" list has one populated frame and one empty `feature { }`,
  which the parser rejects at run time.
  """
  with tf.Graph().as_default():
    missing_frame_proto = text_format.Parse(
        """
        feature_lists {
          feature_list {
            key: "utility"
            value {
              feature { float_list { value: 0.0 } }
              feature { }
            }
          }
        }
        """, tf.train.SequenceExample())
    features = data_lib.parse_from_sequence_example(
        tf.convert_to_tensor(
            value=[missing_frame_proto.SerializeToString()]),
        list_size=2,
        context_feature_spec=None,
        example_feature_spec={"utility": EXAMPLE_FEATURE_SPEC["utility"]})
    with tf.compat.v1.Session() as sess:
      sess.run(tf.compat.v1.local_variables_initializer())
      # assertRaisesRegex: `assertRaisesRegexp` is a deprecated unittest
      # alias (removal warned since Python 3.2).
      with self.assertRaisesRegex(
          tf.errors.InvalidArgumentError,
          r"Unexpected number of elements in feature utility"):
        # The error surfaces only when the parse op actually executes.
        sess.run(features)
def test_parse_from_sequence_example_with_small_list_size(self):
  """list_size smaller than the number of frames truncates the examples."""
  with tf.Graph().as_default():
    parsed = data_lib.parse_from_sequence_example(
        tf.convert_to_tensor(value=[SEQ_EXAMPLE_PROTO_1.SerializeToString()]),
        list_size=1,
        context_feature_spec=CONTEXT_FEATURE_SPEC,
        example_feature_spec=EXAMPLE_FEATURE_SPEC)
    with tf.compat.v1.Session() as sess:
      sess.run(tf.compat.v1.local_variables_initializer())
      feature_map = sess.run(parsed)
      self.assertEqual(sorted(feature_map),
                       ["query_length", "unigrams", "utility"])
      # Only the first frame survives the truncation to list_size=1.
      self.assertAllEqual(feature_map["unigrams"].dense_shape, [1, 1, 3])
      self.assertAllEqual(feature_map["unigrams"].indices, [[0, 0, 0]])
      self.assertAllEqual(feature_map["unigrams"].values, [b"tensorflow"])
      self.assertAllEqual(feature_map["query_length"], [[3]])
      self.assertAllEqual(feature_map["utility"], [[[0.]]])
      # Dense results carry fully-specified static shapes.
      self.assertAllEqual([1, 1], feature_map["query_length"].shape)
      self.assertAllEqual([1, 1, 1], feature_map["utility"].shape)
def test_parse_from_sequence_example(self):
  """Parses a batch of two serialized protos and checks every feature."""
  serialized_batch = tf.convert_to_tensor(value=[
      SEQ_EXAMPLE_PROTO_1.SerializeToString(),
      SEQ_EXAMPLE_PROTO_2.SerializeToString(),
  ])
  features = data_lib.parse_from_sequence_example(
      serialized_batch,
      context_feature_spec=CONTEXT_FEATURE_SPEC,
      example_feature_spec=EXAMPLE_FEATURE_SPEC)
  with tf.compat.v1.Session() as sess:
    sess.run(tf.compat.v1.local_variables_initializer())
    feature_map = sess.run(features)
    self.assertEqual(sorted(feature_map),
                     ["query_length", "unigrams", "utility"])
    # Sparse "unigrams": batch of 2, two frames, at most 3 tokens.
    self.assertAllEqual(feature_map["unigrams"].dense_shape, [2, 2, 3])
    self.assertAllEqual(
        feature_map["unigrams"].indices,
        [[0, 0, 0], [0, 1, 0], [0, 1, 1], [0, 1, 2], [1, 0, 0]])
    self.assertAllEqual(feature_map["unigrams"].values,
                        [b"tensorflow", b"learning", b"to", b"rank", b"gbdt"])
    self.assertAllEqual(feature_map["query_length"], [[3], [2]])
    self.assertAllEqual(feature_map["utility"],
                        [[[0.], [1.]], [[0.], [-1.]]])
    # Static shapes of the dense graph tensors are fully determined.
    self.assertAllEqual([2, 1],
                        features["query_length"].get_shape().as_list())
    self.assertAllEqual([2, 2, 1],
                        features["utility"].get_shape().as_list())
def test_parse_from_sequence_example_with_list_size(self):
  """Parses two protos with an explicit list_size=2 (graph-mode APIs).

  Renamed from `test_parse_from_sequence_example`: that name duplicated
  another method on this class, so the earlier definition was silently
  shadowed and one of the two tests never ran.
  """
  features = data_lib.parse_from_sequence_example(
      ops.convert_to_tensor([
          SEQ_EXAMPLE_PROTO_1.SerializeToString(),
          SEQ_EXAMPLE_PROTO_2.SerializeToString(),
      ]),
      list_size=2,
      context_feature_spec=CONTEXT_FEATURE_SPEC,
      example_feature_spec=EXAMPLE_FEATURE_SPEC)
  with session.Session() as sess:
    sess.run(variables.local_variables_initializer())
    queue_runner.start_queue_runners()
    feature_map = sess.run(features)
    self.assertEqual(sorted(feature_map),
                     ["query_length", "unigrams", "utility"])
    self.assertAllEqual(feature_map["unigrams"].dense_shape, [2, 2, 3])
    self.assertAllEqual(
        feature_map["unigrams"].indices,
        [[0, 0, 0], [0, 1, 0], [0, 1, 1], [0, 1, 2], [1, 0, 0]])
    self.assertAllEqual(
        feature_map["unigrams"].values,
        [b"tensorflow", b"learning", b"to", b"rank", b"gbdt"])
    self.assertAllEqual(feature_map["query_length"], [[3], [2]])
    # With list_size=2 the second query's short list pads utility with 0.
    self.assertAllEqual(feature_map["utility"],
                        [[[0.], [1.]], [[0.], [0.]]])
def test_parse_from_sequence_example_with_sizes(self):
  """size_feature_name adds a per-query size feature to the output."""
  with tf.Graph().as_default():
    parsed = data_lib.parse_from_sequence_example(
        tf.convert_to_tensor(value=[
            SEQ_EXAMPLE_PROTO_1.SerializeToString(),
            SEQ_EXAMPLE_PROTO_2.SerializeToString(),
        ]),
        context_feature_spec=CONTEXT_FEATURE_SPEC,
        example_feature_spec=EXAMPLE_FEATURE_SPEC,
        size_feature_name=_SIZE)
    with tf.compat.v1.Session() as sess:
      sess.run(tf.compat.v1.local_variables_initializer())
      result = sess.run(parsed)
      # Sizes per query: presumably the count of real frames in each
      # proto (2 and 1) — consistent with the other tests' fixtures.
      self.assertAllEqual(result[_SIZE], [2, 1])