def test_assert_ops_in_graph(self):
  """Exercises both the passing and failing paths of assert_ops_in_graph."""
  with self.test_session():
    constant_op.constant(["hello", "taffy"], name="hello")
    graph = ops.get_default_graph()
    # A name/op-type pair that exists in the graph passes silently.
    test_util.assert_ops_in_graph({"hello": "Const"}, graph)
    # An op name that does not exist must raise ValueError.
    self.assertRaises(ValueError, test_util.assert_ops_in_graph,
                      {"bye": "Const"}, graph)
    # An existing op name with a mismatched op type must raise ValueError.
    self.assertRaises(ValueError, test_util.assert_ops_in_graph,
                      {"hello": "Variable"}, graph)
def test_assert_ops_in_graph(self):
  """Checks assert_ops_in_graph on a matching entry and two mismatches."""
  with ops.Graph().as_default():
    constant_op.constant(["hello", "taffy"], name="hello")
    default_graph = ops.get_default_graph()
    # Matching name -> op-type entry: no error expected.
    test_util.assert_ops_in_graph({"hello": "Const"}, default_graph)
    # Unknown op name: ValueError expected.
    self.assertRaises(ValueError, test_util.assert_ops_in_graph,
                      {"bye": "Const"}, default_graph)
    # Known op name but wrong op type: ValueError expected.
    self.assertRaises(ValueError, test_util.assert_ops_in_graph,
                      {"hello": "Variable"}, default_graph)
def test_read_text_lines_multifile_with_shared_queue(self):
  """Reads text lines from two files via a shared file-name queue.

  Verifies the expected V2 queue/reader ops exist in the graph, that the
  three lines come back in file order, and that the queue signals
  OutOfRangeError after a single epoch.
  """
  # Restore the real Glob (the fixture presumably patched it) so the temp
  # files created below can be found.
  gfile.Glob = self._orig_glob
  filenames = self._create_sorted_temp_files(["ABC\n", "DEF\nGHK\n"])
  batch_size = 1
  queue_capacity = 5
  name = "my_batch"
  with ops.Graph().as_default() as g, self.test_session(
      graph=g) as session:
    keys, inputs = _read_keyed_batch_examples_shared_queue(
        filenames,
        batch_size,
        reader=io_ops.TextLineReader,
        randomize_input=False,
        num_epochs=1,
        queue_capacity=queue_capacity,
        name=name)
    # With num_epochs set the batch dimension is unknown (None).
    self.assertAllEqual((None, ), keys.get_shape().as_list())
    self.assertAllEqual((None, ), inputs.get_shape().as_list())
    # num_epochs uses a local variable, so both initializers must run
    # before starting the queue runners.
    session.run([
        variables.local_variables_initializer(),
        variables.global_variables_initializer()
    ])
    coord = coordinator.Coordinator()
    threads = queue_runner_impl.start_queue_runners(session, coord=coord)
    self.assertEqual("%s:1" % name, inputs.name)
    example_queue_name = "%s/fifo_queue" % name
    worker_file_name_queue_name = "%s/file_name_queue/fifo_queue" % name
    # Structural check: the shared-queue pipeline builds these V2 ops.
    test_util.assert_ops_in_graph(
        {
            "%s/read/TextLineReaderV2" % name: "TextLineReaderV2",
            example_queue_name: "FIFOQueueV2",
            worker_file_name_queue_name: "FIFOQueueV2",
            name: "QueueDequeueUpToV2"
        }, g)
    # Lines arrive in sorted-file order: file 1 then file 2.
    self.assertAllEqual(session.run(inputs), [b"ABC"])
    self.assertAllEqual(session.run(inputs), [b"DEF"])
    self.assertAllEqual(session.run(inputs), [b"GHK"])
    # One epoch exhausted: the next dequeue must raise.
    with self.assertRaises(errors.OutOfRangeError):
      session.run(inputs)
    # Shut the queue runners down cleanly.
    coord.request_stop()
    coord.join(threads)
def test_batch_randomized(self):
  """Randomized batching builds a RandomShuffleQueue with the set capacity."""
  batch = 17
  capacity = 1234
  op_name = "my_batch"
  with ops.Graph().as_default() as g, self.test_session(graph=g) as sess:
    inputs = graph_io.read_batch_examples(
        _VALID_FILE_PATTERN,
        batch,
        reader=io_ops.TFRecordReader,
        randomize_input=True,
        queue_capacity=capacity,
        name=op_name)
    self.assertAllEqual((batch,), inputs.get_shape().as_list())
    self.assertEqual("%s:1" % op_name, inputs.name)
    fn_queue = "%s/file_name_queue" % op_name
    fn_const = "%s/input" % fn_queue
    shuffle_queue = "%s/random_shuffle_queue" % op_name
    # Structural check on the graph: every expected op must be present.
    expected = {
        fn_const: "Const",
        fn_queue: "FIFOQueue",
        "%s/read/TFRecordReader" % op_name: "TFRecordReader",
        shuffle_queue: "RandomShuffleQueue",
        op_name: "QueueDequeueMany"
    }
    op_nodes = test_util.assert_ops_in_graph(expected, g)
    # Compare as sets: glob order of file names is not guaranteed.
    self.assertEqual(set(_FILE_NAMES),
                     set(sess.run(["%s:0" % fn_const])[0]))
    self.assertEqual(capacity,
                     op_nodes[shuffle_queue].attr["capacity"].i)
def test_one_epoch(self):
  """Single-epoch reading adds an epoch-limit variable and DequeueUpTo op."""
  batch = 17
  capacity = 1234
  op_name = "my_batch"
  with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
    inputs = tf.contrib.learn.io.read_batch_examples(
        _VALID_FILE_PATTERN,
        batch,
        reader=tf.TFRecordReader,
        randomize_input=True,
        num_epochs=1,
        queue_capacity=capacity,
        name=op_name)
    # With num_epochs set, the batch dimension is unknown.
    self.assertAllEqual((None,), inputs.get_shape().as_list())
    self.assertEqual("%s:1" % op_name, inputs.name)
    fn_queue = "%s/file_name_queue" % op_name
    epoch_limit_var = "%s/limit_epochs/epochs" % fn_queue
    fn_const = "%s/input" % fn_queue
    shuffle_queue = "%s/random_shuffle_queue" % op_name
    expected = {
        fn_const: "Const",
        fn_queue: "FIFOQueue",
        "%s/read/TFRecordReader" % op_name: "TFRecordReader",
        shuffle_queue: "RandomShuffleQueue",
        op_name: "QueueDequeueUpTo",
        epoch_limit_var: "VariableV2"
    }
    op_nodes = test_util.assert_ops_in_graph(expected, g)
    # Glob order is unspecified, so compare file names as sets.
    self.assertEqual(set(_FILE_NAMES),
                     set(sess.run(["%s:0" % fn_const])[0]))
    self.assertEqual(capacity,
                     op_nodes[shuffle_queue].attr["capacity"].i)
def test_batch_randomized(self):
  """Shuffled batching wires a RandomShuffleQueue with the given capacity."""
  size_of_batch = 17
  cap = 1234
  batch_name = "my_batch"
  with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
    inputs = tf.contrib.learn.io.read_batch_examples(
        _VALID_FILE_PATTERN,
        size_of_batch,
        reader=tf.TFRecordReader,
        randomize_input=True,
        queue_capacity=cap,
        name=batch_name)
    self.assertEqual("%s:1" % batch_name, inputs.name)
    names_queue = "%s/file_name_queue" % batch_name
    names_const = "%s/input" % names_queue
    shuffle_q = "%s/random_shuffle_queue" % batch_name
    # All of these ops must be present in the built graph.
    op_nodes = test_util.assert_ops_in_graph({
        names_const: "Const",
        names_queue: "FIFOQueue",
        "%s/read/TFRecordReader" % batch_name: "TFRecordReader",
        shuffle_q: "RandomShuffleQueue",
        batch_name: "QueueDequeueMany"
    }, g)
    # File names compared as sets — glob ordering is not guaranteed.
    self.assertEqual(set(_FILE_NAMES),
                     set(sess.run(["%s:0" % names_const])[0]))
    self.assertEqual(cap, op_nodes[shuffle_q].attr["capacity"].i)
def test_batch_record_features(self):
  """Parses fixed-len record features with multi-threaded read and parse."""
  size = 17
  cap = 1234
  batch_name = "my_batch"
  features = {"feature": tf.FixedLenFeature(shape=[0], dtype=tf.float32)}
  with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
    features = tf.contrib.learn.io.read_batch_record_features(
        _VALID_FILE_PATTERN,
        size,
        features,
        randomize_input=False,
        queue_capacity=cap,
        reader_num_threads=2,
        parser_num_threads=2,
        name=batch_name)
    self.assertEqual("%s/parse_example_batch_join:1" % batch_name,
                     features["feature"].name)
    names_queue = "%s/file_name_queue" % batch_name
    names_const = "%s/input" % names_queue
    examples_queue = "%s/fifo_queue" % batch_name
    parse_queue = "%s/parse_example_batch_join" % batch_name
    # Verify the full read -> enqueue -> parse pipeline exists.
    op_nodes = test_util.assert_ops_in_graph({
        names_const: "Const",
        names_queue: "FIFOQueue",
        "%s/read/TFRecordReader" % batch_name: "TFRecordReader",
        examples_queue: "FIFOQueue",
        parse_queue: "QueueDequeueMany",
        batch_name: "QueueDequeueMany"
    }, g)
    # randomize_input=False preserves file-name order, so exact compare.
    self.assertAllEqual(_FILE_NAMES,
                        sess.run(["%s:0" % names_const])[0])
    self.assertEqual(cap, op_nodes[examples_queue].attr["capacity"].i)
def test_batch_record_features(self):
  """Parses fixed-len record features with two reader and parser threads.

  Checks the output tensor name, the ops built into the graph, the
  (ordered) file-name constant, and the example queue's capacity attr.
  """
  batch_size = 17
  queue_capacity = 1234
  name = "my_batch"
  features = {"feature": tf.FixedLenFeature(shape=[0], dtype=tf.float32)}
  with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
    features = tf.contrib.learn.io.read_batch_record_features(
        _VALID_FILE_PATTERN,
        batch_size,
        features,
        randomize_input=False,
        queue_capacity=queue_capacity,
        reader_num_threads=2,
        parser_num_threads=2,
        name=name)
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual("%s/parse_example_batch_join:0" % name,
                     features["feature"].name)
    file_name_queue_name = "%s/file_name_queue" % name
    file_names_name = "%s/input" % file_name_queue_name
    example_queue_name = "%s/fifo_queue" % name
    parse_example_queue_name = "%s/parse_example_batch_join" % name
    # Structural check: every expected op must exist with this type.
    op_nodes = test_util.assert_ops_in_graph({
        file_names_name: "Const",
        file_name_queue_name: "FIFOQueue",
        "%s/read/TFRecordReader" % name: "TFRecordReader",
        example_queue_name: "FIFOQueue",
        parse_example_queue_name: "QueueDequeueMany",
        name: "QueueDequeueMany"
    }, g)
    # randomize_input=False keeps file order, so exact-order compare.
    self.assertAllEqual(_FILE_NAMES, sess.run(["%s:0" % file_names_name])[0])
    self.assertEqual(queue_capacity,
                     op_nodes[example_queue_name].attr["capacity"].i)
def test_one_epoch(self):
  """One-epoch reading uses DequeueUpToV2 and an epoch-limit variable."""
  n_per_batch = 17
  cap = 1234
  scope = "my_batch"
  with ops.Graph().as_default() as g, self.test_session(graph=g) as sess:
    inputs = graph_io.read_batch_examples(
        _VALID_FILE_PATTERN,
        n_per_batch,
        reader=io_ops.TFRecordReader,
        randomize_input=True,
        num_epochs=1,
        queue_capacity=cap,
        name=scope)
    # Batch dim is unknown when num_epochs limits the input.
    self.assertAllEqual((None, ), inputs.get_shape().as_list())
    self.assertEqual("%s:1" % scope, inputs.name)
    fn_queue = "%s/file_name_queue" % scope
    epochs_var = "%s/limit_epochs/epochs" % fn_queue
    fn_const = "%s/input" % fn_queue
    shuffle_q = "%s/random_shuffle_queue" % scope
    expected_ops = {
        fn_const: "Const",
        fn_queue: "FIFOQueueV2",
        "%s/read/TFRecordReaderV2" % scope: "TFRecordReaderV2",
        shuffle_q: "RandomShuffleQueueV2",
        scope: "QueueDequeueUpToV2",
        epochs_var: "VariableV2"
    }
    op_nodes = test_util.assert_ops_in_graph(expected_ops, g)
    # Set comparison: glob ordering of file names is unspecified.
    self.assertEqual(set(_FILE_NAMES),
                     set(sess.run(["%s:0" % fn_const])[0]))
    self.assertEqual(cap, op_nodes[shuffle_q].attr["capacity"].i)
def test_batch_randomized_multiple_globs(self):
  """Two glob patterns feed one shuffled pipeline; names merge into one Const."""
  n = 17
  cap = 1234
  scope = "my_batch"
  with ops.Graph().as_default() as g, self.session(graph=g) as sess:
    inputs = graph_io.read_batch_examples(
        [_VALID_FILE_PATTERN, _VALID_FILE_PATTERN_2],
        n,
        reader=io_ops.TFRecordReader,
        randomize_input=True,
        queue_capacity=cap,
        name=scope)
    self.assertAllEqual((n, ), inputs.get_shape().as_list())
    self.assertEqual("%s:1" % scope, inputs.name)
    fn_queue = "%s/file_name_queue" % scope
    fn_const = "%s/input" % fn_queue
    shuffle_q = "%s/random_shuffle_queue" % scope
    expected_ops = {
        fn_const: "Const",
        fn_queue: "FIFOQueueV2",
        "%s/read/TFRecordReaderV2" % scope: "TFRecordReaderV2",
        shuffle_q: "RandomShuffleQueueV2",
        scope: "QueueDequeueManyV2"
    }
    op_nodes = test_util.assert_ops_in_graph(expected_ops, g)
    # Both patterns' matches appear; order unspecified, so compare sets.
    self.assertEqual(set(_FILE_NAMES + _FILE_NAMES_2),
                     set(sess.run(["%s:0" % fn_const])[0]))
    self.assertEqual(cap, op_nodes[shuffle_q].attr["capacity"].i)
def test_read_text_lines_multifile_with_shared_queue(self):
  """Reads lines from two files through a shared file-name queue.

  Verifies the expected queue/reader ops exist, that the three lines come
  back in file order, and that one epoch ends in OutOfRangeError.
  """
  # Restore the real Glob (presumably patched by the fixture) so the temp
  # files created below can be found.
  gfile.Glob = self._orig_glob
  filenames = self._create_sorted_temp_files(["ABC\n", "DEF\nGHK\n"])
  batch_size = 1
  queue_capacity = 5
  name = "my_batch"
  with tf.Graph().as_default() as g, self.test_session(
      graph=g) as session:
    _, inputs = _read_keyed_batch_examples_shared_queue(
        filenames,
        batch_size,
        reader=tf.TextLineReader,
        randomize_input=False,
        num_epochs=1,
        queue_capacity=queue_capacity,
        name=name)
    # num_epochs uses a local variable, which must be initialized before
    # the queue runners start.
    # NOTE(review): tf.initialize_local_variables is a deprecated alias of
    # tf.local_variables_initializer — confirm which exists in this TF
    # version before changing it.
    session.run(tf.initialize_local_variables())
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(session, coord=coord)
    self.assertEqual("%s:1" % name, inputs.name)
    shared_file_name_queue_name = "%s/file_name_queue" % name
    file_names_name = "%s/input" % shared_file_name_queue_name
    example_queue_name = "%s/fifo_queue" % name
    worker_file_name_queue_name = "%s/file_name_queue/fifo_queue" % name
    # Structural check: shared queue, per-worker queue, reader, dequeue.
    test_util.assert_ops_in_graph(
        {
            file_names_name: "Const",
            shared_file_name_queue_name: "FIFOQueue",
            "%s/read/TextLineReader" % name: "TextLineReader",
            example_queue_name: "FIFOQueue",
            worker_file_name_queue_name: "FIFOQueue",
            name: "QueueDequeueUpTo"
        }, g)
    # randomize_input=False: lines arrive in sorted-file order.
    self.assertAllEqual(session.run(inputs), [b"ABC"])
    self.assertAllEqual(session.run(inputs), [b"DEF"])
    self.assertAllEqual(session.run(inputs), [b"GHK"])
    # Epoch exhausted: further dequeues must raise.
    with self.assertRaises(errors.OutOfRangeError):
      session.run(inputs)
    # Shut down the queue runners cleanly.
    coord.request_stop()
    coord.join(threads)
def test_read_text_lines_multifile_with_shared_queue(self):
  """Reads keyed text lines from two files via a shared file-name queue.

  Checks output shapes, the ops built into the graph, in-order delivery
  of the three lines, and OutOfRangeError after the single epoch.
  """
  # Restore the real Glob (presumably patched by the fixture) so the temp
  # files created below can be found.
  gfile.Glob = self._orig_glob
  filenames = self._create_sorted_temp_files(["ABC\n", "DEF\nGHK\n"])
  batch_size = 1
  queue_capacity = 5
  name = "my_batch"
  with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
    keys, inputs = _read_keyed_batch_examples_shared_queue(
        filenames,
        batch_size,
        reader=tf.TextLineReader,
        randomize_input=False,
        num_epochs=1,
        queue_capacity=queue_capacity,
        name=name)
    # num_epochs makes the batch dimension unknown (None).
    self.assertAllEqual((None,), keys.get_shape().as_list())
    self.assertAllEqual((None,), inputs.get_shape().as_list())
    # The epoch counter is a local variable; initialize it before the
    # queue runners start.
    session.run(tf.local_variables_initializer())
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(session, coord=coord)
    self.assertEqual("%s:1" % name, inputs.name)
    shared_file_name_queue_name = "%s/file_name_queue" % name
    file_names_name = "%s/input" % shared_file_name_queue_name
    example_queue_name = "%s/fifo_queue" % name
    worker_file_name_queue_name = "%s/file_name_queue/fifo_queue" % name
    # Structural check: shared queue, per-worker queue, reader, dequeue.
    test_util.assert_ops_in_graph({
        file_names_name: "Const",
        shared_file_name_queue_name: "FIFOQueue",
        "%s/read/TextLineReader" % name: "TextLineReader",
        example_queue_name: "FIFOQueue",
        worker_file_name_queue_name: "FIFOQueue",
        name: "QueueDequeueUpTo"
    }, g)
    # randomize_input=False: lines come back in sorted-file order.
    self.assertAllEqual(session.run(inputs), [b"ABC"])
    self.assertAllEqual(session.run(inputs), [b"DEF"])
    self.assertAllEqual(session.run(inputs), [b"GHK"])
    # Single epoch exhausted: next dequeue must raise.
    with self.assertRaises(errors.OutOfRangeError):
      session.run(inputs)
    # Clean shutdown of the runner threads.
    coord.request_stop()
    coord.join(threads)
def testIdTableWithHashBucketsWithMultipleInitializers(self):
  """Two IdTableWithHashBuckets over one vocab table, different hashers.

  table1 uses FastHash and table2 uses StrongHash, so OOV words may land
  in different buckets while in-vocab ids agree; also checks sizes and
  that the expected hash-bucket ops are built.
  """
  vocab_file = self._createVocabFile("feat_to_id_4.txt")
  with self.test_session() as sess:
    default_value = -1
    vocab_size = 3
    oov_buckets = 3
    vocab_table = lookup_ops.HashTable(
        lookup_ops.TextFileIdTableInitializer(
            vocab_file, vocab_size=vocab_size), default_value)
    table1 = lookup_ops.IdTableWithHashBuckets(
        vocab_table,
        oov_buckets,
        hasher_spec=lookup_ops.FastHashSpec,
        name="table1")
    table2 = lookup_ops.IdTableWithHashBuckets(
        vocab_table,
        oov_buckets,
        hasher_spec=lookup_ops.StrongHashSpec((1, 2)),
        name="table2")
    lookup_ops.tables_initializer().run()
    input_string = constant_op.constant(
        ["fruit", "brain", "salad", "surgery", "UNK"])
    out1 = table1.lookup(input_string)
    out2 = table2.lookup(input_string)
    out1, out2 = sess.run([out1, out2])
    # In-vocab words map to 0..2; OOV words hash into buckets 3..5,
    # differing between the two hasher specs only for OOV entries.
    self.assertAllEqual([5, 0, 1, 2, 5], out1)
    self.assertAllEqual([5, 0, 1, 2, 3], out2)
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(vocab_size + oov_buckets, table1.size().eval())
    self.assertEqual(vocab_size + oov_buckets, table2.size().eval())
    # Each table's lookup builds its corresponding hash-bucket op.
    test_util.assert_ops_in_graph({
        "table1_Lookup/hash_bucket": "StringToHashBucketFast",
        "table2_Lookup/hash_bucket": "StringToHashBucketStrong",
    }, sess.graph)
def test_batch_record_features(self):
  """Record-feature parsing builds V2 queue ops and the expected shapes."""
  n = 17
  cap = 1234
  scope = "my_batch"
  feature_shape = (0, )
  features = {
      "feature":
          parsing_ops.FixedLenFeature(
              shape=feature_shape, dtype=dtypes_lib.float32)
  }
  with ops.Graph().as_default() as g, self.test_session(graph=g) as sess:
    features = graph_io.read_batch_record_features(
        _VALID_FILE_PATTERN,
        n,
        features,
        randomize_input=False,
        queue_capacity=cap,
        reader_num_threads=2,
        name=scope)
    self.assertTrue("feature" in features,
                    "'feature' missing from %s." % features.keys())
    feature = features["feature"]
    self.assertEqual("%s/fifo_queue_1_Dequeue:0" % scope, feature.name)
    self.assertAllEqual((n, ) + feature_shape,
                        feature.get_shape().as_list())
    fn_queue = "%s/file_name_queue" % scope
    fn_const = "%s/input" % fn_queue
    examples_queue = "%s/fifo_queue" % scope
    parse_queue = "%s/fifo_queue" % scope
    expected_ops = {
        fn_const: "Const",
        fn_queue: "FIFOQueueV2",
        "%s/read/TFRecordReaderV2" % scope: "TFRecordReaderV2",
        examples_queue: "FIFOQueueV2",
        parse_queue: "FIFOQueueV2",
        scope: "QueueDequeueManyV2"
    }
    op_nodes = test_util.assert_ops_in_graph(expected_ops, g)
    # randomize_input=False preserves file order, so exact compare.
    self.assertAllEqual(_FILE_NAMES, sess.run(["%s:0" % fn_const])[0])
    self.assertEqual(cap, op_nodes[examples_queue].attr["capacity"].i)
def test_batch_record_features(self):
  """Record-feature parsing builds the expected queue/reader pipeline."""
  n_examples = 17
  cap = 1234
  scope = "my_batch"
  feat_shape = (0,)
  features = {
      "feature":
          parsing_ops.FixedLenFeature(
              shape=feat_shape, dtype=dtypes_lib.float32)
  }
  with ops.Graph().as_default() as g, self.test_session(graph=g) as sess:
    features = graph_io.read_batch_record_features(
        _VALID_FILE_PATTERN,
        n_examples,
        features,
        randomize_input=False,
        queue_capacity=cap,
        reader_num_threads=2,
        name=scope)
    self.assertTrue("feature" in features,
                    "'feature' missing from %s." % features.keys())
    feature = features["feature"]
    self.assertEqual("%s/fifo_queue_1_Dequeue:0" % scope, feature.name)
    self.assertAllEqual((n_examples,) + feat_shape,
                        feature.get_shape().as_list())
    names_queue = "%s/file_name_queue" % scope
    names_const = "%s/input" % names_queue
    examples_queue = "%s/fifo_queue" % scope
    parse_queue = "%s/fifo_queue" % scope
    # Every expected op must be present with the right type.
    op_nodes = test_util.assert_ops_in_graph({
        names_const: "Const",
        names_queue: "FIFOQueue",
        "%s/read/TFRecordReader" % scope: "TFRecordReader",
        examples_queue: "FIFOQueue",
        parse_queue: "FIFOQueue",
        scope: "QueueDequeueMany"
    }, g)
    # File order is preserved (no randomization), so exact compare.
    self.assertAllEqual(_FILE_NAMES, sess.run(["%s:0" % names_const])[0])
    self.assertEqual(cap, op_nodes[examples_queue].attr["capacity"].i)
def test_batch_tf_record(self):
  """Non-randomized TFRecord batching builds a plain FIFO pipeline.

  Checks the output tensor name, the ops in the graph, the (ordered)
  file-name constant, and the example queue's capacity attribute.
  """
  batch_size = 17
  queue_capacity = 1234
  name = "my_batch"
  with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
    inputs = tf.contrib.learn.io.read_batch_examples(
        _VALID_FILE_PATTERN,
        batch_size,
        reader=tf.TFRecordReader,
        randomize_input=False,
        queue_capacity=queue_capacity,
        name=name)
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual("%s:0" % name, inputs.name)
    file_name_queue_name = "%s/file_name_queue" % name
    file_names_name = "%s/input" % file_name_queue_name
    example_queue_name = "%s/fifo_queue" % name
    # Structural check: file-name const, two FIFO queues, reader, dequeue.
    op_nodes = test_util.assert_ops_in_graph({
        file_names_name: "Const",
        file_name_queue_name: "FIFOQueue",
        "%s/read/TFRecordReader" % name: "TFRecordReader",
        example_queue_name: "FIFOQueue",
        name: "QueueDequeueMany"
    }, g)
    # randomize_input=False keeps file order, so exact-order compare.
    self.assertAllEqual(_FILE_NAMES, sess.run(["%s:0" % file_names_name])[0])
    self.assertEqual(queue_capacity,
                     op_nodes[example_queue_name].attr["capacity"].i)