def testOneEpoch(self):
  with self.test_session() as sess:
    reader = tf.IdentityReader("test_reader")
    work_completed = reader.num_work_units_completed()
    produced = reader.num_records_produced()
    queue = tf.FIFOQueue(99, [tf.string], shapes=())
    queued_length = queue.size()
    key, value = reader.read(queue)

    self.assertAllEqual(0, work_completed.eval())
    self.assertAllEqual(0, produced.eval())
    self.assertAllEqual(0, queued_length.eval())

    queue.enqueue_many([["A", "B", "C"]]).run()
    queue.close().run()
    self.assertAllEqual(3, queued_length.eval())

    self._ExpectRead(sess, key, value, b"A")
    self.assertAllEqual(1, produced.eval())

    self._ExpectRead(sess, key, value, b"B")

    self._ExpectRead(sess, key, value, b"C")
    self.assertAllEqual(3, produced.eval())
    self.assertAllEqual(0, queued_length.eval())

    with self.assertRaisesOpError("is closed and has insufficient elements "
                                  "\\(requested 1, current size 0\\)"):
      sess.run([key, value])

    self.assertAllEqual(3, work_completed.eval())
    self.assertAllEqual(3, produced.eval())
    self.assertAllEqual(0, queued_length.eval())
def testReset(self):
  with self.test_session() as sess:
    reader = tf.IdentityReader("test_reader")
    work_completed = reader.num_work_units_completed()
    produced = reader.num_records_produced()
    queue = tf.FIFOQueue(99, [tf.string], shapes=())
    queued_length = queue.size()
    key, value = reader.read(queue)

    queue.enqueue_many([["X", "Y", "Z"]]).run()
    self._ExpectRead(sess, key, value, b"X")
    self.assertLess(0, queued_length.eval())
    self.assertAllEqual(1, produced.eval())

    self._ExpectRead(sess, key, value, b"Y")
    self.assertLess(0, work_completed.eval())
    self.assertAllEqual(2, produced.eval())

    reader.reset().run()
    self.assertAllEqual(0, work_completed.eval())
    self.assertAllEqual(0, produced.eval())
    self.assertAllEqual(1, queued_length.eval())
    self._ExpectRead(sess, key, value, b"Z")

    queue.enqueue_many([["K", "L"]]).run()
    self._ExpectRead(sess, key, value, b"K")
def testSerializeRestore(self):
  with self.test_session() as sess:
    reader = tf.IdentityReader("test_reader")
    produced = reader.num_records_produced()
    queue = tf.FIFOQueue(99, [tf.string], shapes=())
    queue.enqueue_many([["X", "Y", "Z"]]).run()
    key, value = reader.read(queue)

    self._ExpectRead(sess, key, value, b"X")
    self.assertAllEqual(1, produced.eval())
    state = reader.serialize_state().eval()

    self._ExpectRead(sess, key, value, b"Y")
    self._ExpectRead(sess, key, value, b"Z")
    self.assertAllEqual(3, produced.eval())

    queue.enqueue_many([["Y", "Z"]]).run()
    queue.close().run()
    reader.restore_state(state).run()
    self.assertAllEqual(1, produced.eval())
    self._ExpectRead(sess, key, value, b"Y")
    self._ExpectRead(sess, key, value, b"Z")
    with self.assertRaisesOpError("is closed and has insufficient elements "
                                  "\\(requested 1, current size 0\\)"):
      sess.run([key, value])
    self.assertAllEqual(3, produced.eval())

    self.assertEqual(bytes, type(state))
    with self.assertRaises(ValueError):
      reader.restore_state([])
    with self.assertRaises(ValueError):
      reader.restore_state([state, state])

    # Restoring from truncated, extended, or corrupted state must fail.
    with self.assertRaisesOpError(
        "Could not parse state for IdentityReader 'test_reader'"):
      reader.restore_state(state[1:]).run()
    with self.assertRaisesOpError(
        "Could not parse state for IdentityReader 'test_reader'"):
      reader.restore_state(state[:-1]).run()
    with self.assertRaisesOpError(
        "Could not parse state for IdentityReader 'test_reader'"):
      reader.restore_state(state + b"ExtraJunk").run()
    with self.assertRaisesOpError(
        "Could not parse state for IdentityReader 'test_reader'"):
      reader.restore_state(b"PREFIX" + state).run()
    with self.assertRaisesOpError(
        "Could not parse state for IdentityReader 'test_reader'"):
      reader.restore_state(b"BOGUS" + state[5:]).run()
def testReadUpTo(self):
  # Note that this just tests the default ReaderReadUpTo since it is not
  # overridden for IdentityReader.
  with self.test_session() as sess:
    reader = tf.IdentityReader("test_reader")
    queue = tf.FIFOQueue(99, [tf.string], shapes=())
    keys, values = reader.read_up_to(queue, 3)
    queue.enqueue_many([["A", "B", "C"]]).run()
    k, v = sess.run([keys, values])
    self.assertAllEqual([b"A", b"B", b"C"], k)
    self.assertAllEqual([b"A", b"B", b"C"], v)
import traceback

import tensorflow as tf


def read_op(csv_records, size, num_categories, shuffle=True):
    try:
        queue = tf.train.string_input_producer(csv_records, shuffle=shuffle)
        reader = tf.IdentityReader()
        _, record = reader.read(queue)
        # Each record is a CSV line of the form "<image path>,<integer label>".
        path, label = tf.decode_csv(record, [[''], [0]])
        image_raw = tf.read_file(path)
        image = tf.image.decode_jpeg(image_raw, channels=3)
        image = tf.image.resize_images(image, size)
        label = tf.one_hot(label, num_categories)
    except Exception:
        # Log the full traceback, then re-raise: returning here would hit
        # unbound locals and silently mask the original graph-building error.
        traceback.print_exc()
        raise
    return image, label
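# A minimal sketch of driving the read_op pipeline above. The CSV records,
# image paths, and batch parameters are hypothetical, for illustration only.
if __name__ == '__main__':
    records = ['images/cat.jpg,0', 'images/dog.jpg,1']  # hypothetical data
    x, y = read_op(records, size=[224, 224], num_categories=2)
    images, labels = tf.train.batch([x, y], batch_size=2)
    with tf.Session() as sess:
        # string_input_producer registers queue runners that must be started
        # before anything can be dequeued.
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        image_batch, label_batch = sess.run([images, labels])
        coord.request_stop()
        coord.join(threads)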
def testMultipleEpochs(self):
  with self.test_session() as sess:
    reader = tf.IdentityReader("test_reader")
    queue = tf.FIFOQueue(99, [tf.string], shapes=())
    enqueue = queue.enqueue_many([["DD", "EE"]])
    key, value = reader.read(queue)
    enqueue.run()
    self._ExpectRead(sess, key, value, b"DD")
    self._ExpectRead(sess, key, value, b"EE")
    enqueue.run()
    self._ExpectRead(sess, key, value, b"DD")
    self._ExpectRead(sess, key, value, b"EE")
    enqueue.run()
    self._ExpectRead(sess, key, value, b"DD")
    self._ExpectRead(sess, key, value, b"EE")
    queue.close().run()
    with self.assertRaisesOpError("is closed and has insufficient elements "
                                  "\\(requested 1, current size 0\\)"):
      sess.run([key, value])
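# The tests above call a _ExpectRead helper that is not shown in this section.
# A minimal sketch, assuming it only verifies that IdentityReader returns each
# work item as both the key and the value:
def _ExpectRead(self, sess, key, value, expected):
  k, v = sess.run([key, value])
  self.assertAllEqual(expected, k)
  self.assertAllEqual(expected, v)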