def test_encode_listwise_features(self):
  """Verifies listwise encoding of context and per-example feature columns."""
  with tf.Graph().as_default():
    # Batch size = 2, list_size = 2.
    feature_dict = {
        "query_length": tf.convert_to_tensor(value=[[1], [2]]),
        "utility": tf.convert_to_tensor(value=[[[1.0], [0.0]], [[0.0], [1.0]]]),
        "unigrams": tf.SparseTensor(
            indices=[[0, 0, 0], [0, 1, 0], [1, 0, 0], [1, 1, 0]],
            values=["ranking", "regression", "classification", "ordinal"],
            dense_shape=[2, 2, 1]),
    }
    context_cols = {
        "query_length": feature_column.numeric_column(
            "query_length", shape=(1,), default_value=0, dtype=tf.int64)
    }
    example_cols = {
        "utility": feature_column.numeric_column(
            "utility", shape=(1,), default_value=0.0, dtype=tf.float32),
        "unigrams": feature_column.embedding_column(
            feature_column.categorical_column_with_vocabulary_list(
                "unigrams",
                vocabulary_list=[
                    "ranking", "regression", "classification", "ordinal"
                ]),
            dimension=10),
    }
    # An input_size that disagrees with the tensors' 2nd dimension is rejected.
    with self.assertRaisesRegexp(
        ValueError,
        r"2nd dimension of tensor must be equal to input size: 3, but found .*"
    ):
      feature_lib.encode_listwise_features(
          feature_dict,
          input_size=3,
          context_feature_columns=context_cols,
          example_feature_columns=example_cols)
    # With the matching input_size, encoding succeeds and splits the features.
    context_out, example_out = feature_lib.encode_listwise_features(
        feature_dict,
        input_size=2,
        context_feature_columns=context_cols,
        example_feature_columns=example_cols)
    self.assertAllEqual(["query_length"], sorted(context_out))
    self.assertAllEqual(["unigrams", "utility"], sorted(example_out))
    # Embedded unigrams: [batch, list_size, embedding_dimension].
    self.assertAllEqual([2, 2, 10],
                        example_out["unigrams"].get_shape().as_list())
    with tf.compat.v1.Session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      sess.run(tf.compat.v1.tables_initializer())
      context_out, example_out = sess.run([context_out, example_out])
      self.assertAllEqual([[1], [2]], context_out["query_length"])
      self.assertAllEqual([[[1.0], [0.0]], [[0.0], [1.0]]],
                          example_out["utility"])
def _multiply_by_two_transform_fn(features, mode):
  """Transform function that doubles every feature tensor before encoding.

  Args:
    features: A dict mapping feature names to tensors. Mutated in place: each
      tensor is replaced by `2 * tensor`.
    mode: A mode key (e.g. `tf.estimator.ModeKeys`) forwarded to the encoder.

  Returns:
    A `(context_features, example_features)` tuple as produced by
    `feature_lib.encode_listwise_features`.
  """
  # `dict.items()` replaces the Python-2-era `six.iteritems`; reassigning
  # existing keys while iterating is safe since the key set never changes.
  for name, tensor in features.items():
    features[name] = 2 * tensor
  context, example = feature_lib.encode_listwise_features(
      features=features,
      context_feature_columns=_context_feature_columns(),
      example_feature_columns=_example_feature_columns(),
      mode=mode)
  return context, example
def _transform_fn(features, mode):
  """Splits the features into context and per-example features.

  Args:
    features: A dict mapping feature names to tensors, covering both context
      and per-example feature columns.
    mode: A mode key forwarded to `feature_lib.encode_listwise_features`.

  Returns:
    A `(context_features, example_features)` tuple of dicts of encoded
    tensors.
  """
  # NOTE(review): leftover debug `print` statements dumping tensor shapes
  # were removed; they produced noisy stdout on every call and had no effect
  # on the returned values.
  # `input_size`, `context_feature_columns` and `example_feature_columns` are
  # captured from the enclosing scope — presumably defined by the surrounding
  # function; confirm against the caller.
  context_features, example_features = feature_lib.encode_listwise_features(
      features,
      input_size=input_size,
      context_feature_columns=context_feature_columns,
      example_feature_columns=example_feature_columns,
      mode=mode)
  return context_features, example_features
def _transform_fn(self, features, mode):
  """Defines the transform fn."""
  # A user-supplied transform function takes precedence over the defaults.
  if self._transform_function is not None:
    return self._transform_function(features=features, mode=mode)
  # At PREDICT time features arrive pointwise; otherwise encode listwise.
  if mode == tf.estimator.ModeKeys.PREDICT:
    encoder = feature.encode_pointwise_features
  else:
    encoder = feature.encode_listwise_features
  return encoder(
      features=features,
      context_feature_columns=self._context_feature_columns,
      example_feature_columns=self._example_feature_columns,
      mode=mode,
      scope="transform_layer")