def testMultipleCallsToEvalInputReceiver(self):
  graph = tf.Graph()
  features1 = {'apple': tf.constant(1.0), 'banana': tf.constant(2.0)}
  labels1 = tf.constant(3.0)
  receiver_tensors1 = {'examples': tf.placeholder(tf.string)}

  features2 = {'cherry': tf.constant(3.0)}
  labels2 = {'alpha': tf.constant(4.0), 'bravo': tf.constant(5.0)}
  receiver_tensors2 = {'examples': tf.placeholder(tf.string)}

  with graph.as_default():
    export.EvalInputReceiver(
        features=features1,
        labels=labels1,
        receiver_tensors=receiver_tensors1)

    feature_keys_collection_name = encoding.with_suffix(
        encoding.FEATURES_COLLECTION, encoding.KEY_SUFFIX)
    feature_nodes_collection_name = encoding.with_suffix(
        encoding.FEATURES_COLLECTION, encoding.NODE_SUFFIX)
    label_keys_collection_name = encoding.with_suffix(
        encoding.LABELS_COLLECTION, encoding.KEY_SUFFIX)
    label_nodes_collection_name = encoding.with_suffix(
        encoding.LABELS_COLLECTION, encoding.NODE_SUFFIX)

    self.assertEqual(
        2, len(tf.get_collection(feature_keys_collection_name)))
    self.assertEqual(
        2, len(tf.get_collection(feature_nodes_collection_name)))
    self.assertEqual(
        1, len(tf.get_collection(label_keys_collection_name)))
    self.assertEqual(
        1, len(tf.get_collection(label_nodes_collection_name)))
    self.assertEqual(
        1, len(tf.get_collection(encoding.EXAMPLE_REF_COLLECTION)))
    self.assertEqual(
        1, len(tf.get_collection(encoding.TFMA_VERSION_COLLECTION)))

    # Call again with a different set of features, labels and receiver
    # tensors, and check that the latest call overrides the earlier one.
    #
    # Note that we only check the lengths of some collections: more detailed
    # checks would require the test to include more knowledge about the
    # details of how exporting is done.
    export.EvalInputReceiver(
        features=features2,
        labels=labels2,
        receiver_tensors=receiver_tensors2)
    self.assertEqual(
        1, len(tf.get_collection(feature_keys_collection_name)))
    self.assertEqual(
        1, len(tf.get_collection(feature_nodes_collection_name)))
    self.assertEqual(
        2, len(tf.get_collection(label_keys_collection_name)))
    self.assertEqual(
        2, len(tf.get_collection(label_nodes_collection_name)))
    self.assertEqual(
        1, len(tf.get_collection(encoding.EXAMPLE_REF_COLLECTION)))
    self.assertEqual(
        1, len(tf.get_collection(encoding.TFMA_VERSION_COLLECTION)))
def eval_input_receiver_fn(): """Eval input receiver function.""" serialized_tf_example = tf.compat.v1.placeholder( dtype=tf.string, shape=[None], name='input_example_tensor') language = tf.feature_column.categorical_column_with_vocabulary_list( 'language', ['english', 'chinese', 'other']) age = tf.feature_column.numeric_column('age') english_label = tf.feature_column.numeric_column('english_label') chinese_label = tf.feature_column.numeric_column('chinese_label') other_label = tf.feature_column.numeric_column('other_label') all_features = [ age, language, english_label, chinese_label, other_label ] feature_spec = tf.feature_column.make_parse_example_spec(all_features) receiver_tensors = {'examples': serialized_tf_example} features = tf.io.parse_example(serialized=serialized_tf_example, features=feature_spec) labels = { 'english_head': features['english_label'], 'chinese_head': features['chinese_label'], 'other_head': features['other_label'], } return export.EvalInputReceiver(features=features, receiver_tensors=receiver_tensors, labels=labels)
def eval_input_receiver_fn(): """Eval input receiver function.""" serialized_tf_example = tf.placeholder( dtype=tf.string, shape=[None], name='input_example_tensor') language = tf.contrib.layers.sparse_column_with_keys( 'language', ['english', 'chinese', 'other']) age = tf.contrib.layers.real_valued_column('age') english_label = tf.contrib.layers.real_valued_column('english_label') chinese_label = tf.contrib.layers.real_valued_column('chinese_label') other_label = tf.contrib.layers.real_valued_column('other_label') all_features = [age, language, english_label, chinese_label, other_label] feature_spec = tf.contrib.layers.create_feature_spec_for_parsing( all_features) receiver_tensors = {'examples': serialized_tf_example} features = tf.parse_example(serialized_tf_example, feature_spec) labels = { 'english_head': features['english_label'], 'chinese_head': features['chinese_label'], 'other_head': features['other_label'], } return export.EvalInputReceiver( features=features, receiver_tensors=receiver_tensors, labels=labels)
def testEvalInputReceiverReceiverTensorKeyCheck(self):
  with self.assertRaisesRegexp(ValueError, 'exactly one key named examples'):
    export.EvalInputReceiver(
        features={},
        labels={},
        receiver_tensors={'bad_key': tf.constant(0.0)})
def eval_input_receiver_fn(): """Eval input receiver function.""" csv_row = tf.placeholder(dtype=tf.string, shape=[None], name='input_csv_row') features = parse_csv(csv_row) receiver_tensors = {'examples': csv_row} return export.EvalInputReceiver(features=features, receiver_tensors=receiver_tensors, labels=features['label'])
def _eval_input_receiver_fn():
  """Eval input receiver function."""
  csv_row = tf.placeholder(
      dtype=tf.string, shape=[None], name='input_csv_row')
  features = _parse_csv(csv_row)
  receiver_tensors = {'examples': csv_row}
  # The constructor of EvalInputReceiver() has side effects (it populates some
  # TF collections). Call it twice here to make sure the resulting collisions
  # are handled correctly.
  export.EvalInputReceiver(
      features=features,
      labels=features['input_index'],
      receiver_tensors=receiver_tensors,
      example_ref=features['input_index'])
  return export.EvalInputReceiver(
      features=features,
      labels=features['input_index'],
      receiver_tensors=receiver_tensors,
      example_ref=features['input_index'])
def _bad_eval_input_receiver_fn_out_of_range_input_refs():
  """A bad eval input receiver function (input_refs has an out-of-range index)."""
  csv_row = tf.compat.v1.placeholder(
      dtype=tf.string, shape=[None], name='input_csv_row')
  features = _parse_csv(csv_row)
  receiver_tensors = {'examples': csv_row}
  return export.EvalInputReceiver(
      features=features,
      labels=features['input_index'],
      receiver_tensors=receiver_tensors,
      input_refs=features['input_index'] + 1)
def eval_input_receiver_fn(): """Eval input receiver function.""" serialized_tf_example = tf.placeholder(dtype=tf.string, shape=[None], name='input_example_tensor') receiver_tensors = {'examples': serialized_tf_example} features = tf.parse_example(serialized_tf_example, label_feature_spec) _make_embedding_and_sparse_values(features) return export.EvalInputReceiver(features=features, receiver_tensors=receiver_tensors, labels=features['label'])
def eval_input_receiver_fn(): """An input_fn that expects a serialized tf.Example.""" serialized_tf_example = tf.placeholder(dtype=tf.string, shape=[None], name='input_example_tensor') features = tf.parse_example(serialized_tf_example, eval_feature_spec) labels = { 'actual_label': features['label'], 'var_int': features['var_int'] } return export.EvalInputReceiver( features=features, labels=labels, receiver_tensors={'examples': serialized_tf_example})
def _bad_eval_input_receiver_fn_misaligned_input_refs():
  """A bad eval input receiver function capturing a misaligned input_refs."""
  csv_row = tf.compat.v1.placeholder(
      dtype=tf.string, shape=[None], name='input_csv_row')
  features = _parse_csv(csv_row)
  receiver_tensors = {'examples': csv_row}
  return export.EvalInputReceiver(
      features=features,
      labels=features['input_index'],
      receiver_tensors=receiver_tensors,
      input_refs=tf.concat(
          [features['input_index'], tf.constant([0], dtype=tf.int32)],
          axis=0))
def eval_input_receiver_fn(): """Eval input receiver function.""" serialized_tf_example = tf.compat.v1.placeholder( dtype=tf.string, shape=[None], name='input_example_tensor') feature_spec = { 'age': tf.io.FixedLenFeature([1], dtype=tf.float32), 'label': tf.io.FixedLenFeature([1], dtype=tf.float32) } receiver_tensors = {'examples': serialized_tf_example} features = tf.io.parse_example(serialized=serialized_tf_example, features=feature_spec) return export.EvalInputReceiver(features=features, labels=features['label'], receiver_tensors=receiver_tensors)
def _eval_input_receiver_using_iterator_fn():
  """Eval input receiver function using an iterator."""
  csv_row = tf.compat.v1.placeholder(
      dtype=tf.string, shape=[None], name='input_csv_row')
  iterator = tf.compat.v1.data.make_initializable_iterator(
      tf.compat.v1.data.Dataset.from_tensors(csv_row))
  features = _parse_csv(iterator.get_next())
  receiver_tensors = {'examples': csv_row}
  return export.EvalInputReceiver(
      features=features,
      labels=features['input_index'],
      receiver_tensors=receiver_tensors,
      input_refs=features['input_index'],
      iterator_initializer=iterator.initializer.name)
def eval_input_receiver_fn(): """An input_fn that expects a serialized tf.Example.""" # Note it's *required* that the batch size should be variable for TFMA. serialized_tf_example = tf.compat.v1.placeholder( dtype=tf.string, shape=[None], name='input_example_tensor') features = tf.io.parse_example(serialized=serialized_tf_example, features=feature_spec) labels = None if label_key is None else features[label_key] if isinstance(labels, tf.SparseTensor): # This bit here is why a custom eval_input_receiver_fn is specified. labels = tf.sparse.to_dense(labels, default_value=-1) return export.EvalInputReceiver( features=features, labels=labels, receiver_tensors={'examples': serialized_tf_example})
def eval_input_receiver_fn(): """Eval input receiver function.""" serialized_tf_example = tf.placeholder( dtype=tf.string, shape=[None], name='input_example_tensor') animals = tf.contrib.layers.sparse_column_with_keys('animals', ['bird', 'cat', 'dog']) label = tf.contrib.layers.real_valued_column('label') all_features = [animals, label] feature_spec = tf.contrib.layers.create_feature_spec_for_parsing( all_features) receiver_tensors = {'examples': serialized_tf_example} features = tf.parse_example(serialized_tf_example, feature_spec) return export.EvalInputReceiver( features=features, receiver_tensors=receiver_tensors, labels=features['label'])
def eval_input_receiver_fn(): """Eval input receiver function.""" serialized_tf_example = tf.placeholder(dtype=tf.string, shape=[None], name='input_example_tensor') feature_spec = { 'prediction': tf.FixedLenFeature([1], dtype=tf.float32), 'label': tf.FixedLenFeature([1], dtype=tf.float32), 'fixed_float': tf.FixedLenFeature([1], dtype=tf.float32), 'fixed_string': tf.FixedLenFeature([1], dtype=tf.string), 'var_float': tf.VarLenFeature(dtype=tf.float32), 'var_string': tf.VarLenFeature(dtype=tf.string) } receiver_tensors = {'examples': serialized_tf_example} features = tf.parse_example(serialized_tf_example, feature_spec) return export.EvalInputReceiver(features=features, receiver_tensors=receiver_tensors, labels=features['label'])
def eval_input_receiver_fn(): """Eval input receiver function.""" serialized_tf_example = tf.placeholder(dtype=tf.string, shape=[None], name='input_example_tensor') language = tf.contrib.layers.sparse_column_with_keys( 'language', ['english', 'chinese']) slice_key = tf.contrib.layers.sparse_column_with_hash_bucket( 'slice_key', 100) age = tf.contrib.layers.real_valued_column('age') label = tf.contrib.layers.real_valued_column('label') all_features = [age, language, label, slice_key] feature_spec = tf.contrib.layers.create_feature_spec_for_parsing( all_features) receiver_tensors = {'examples': serialized_tf_example} features = tf.parse_example(serialized_tf_example, feature_spec) return export.EvalInputReceiver(features=features, receiver_tensors=receiver_tensors, labels=features['label'])