Example #1
0
  def testReadFromSameFile(self):
    """Two readers sharing one filename queue must yield identical records."""
    with self.cached_session() as sess:
      first_reader = io_ops.LMDBReader(name="test_read_from_same_file1")
      second_reader = io_ops.LMDBReader(name="test_read_from_same_file2")
      filename_queue = input_lib.string_input_producer(
          [self.db_path], num_epochs=None)
      key_a, value_a = first_reader.read(filename_queue)
      key_b, value_b = second_reader.read(filename_queue)

      coord = coordinator.Coordinator()
      runner_threads = queue_runner_impl.start_queue_runners(sess, coord=coord)
      # 3 passes over the 10-record database; both readers stay in lockstep.
      for _ in range(3 * 10):
        ka, va, kb, vb = self.evaluate([key_a, value_a, key_b, value_b])
        self.assertAllEqual(compat.as_bytes(ka), compat.as_bytes(kb))
        self.assertAllEqual(compat.as_bytes(va), compat.as_bytes(vb))
      coord.request_stop()
      coord.join(runner_threads)
Example #2
0
  def testReadFromFolder(self):
    """Reads ten records from a folder-backed LMDB: keys '0'..'9', values 'a'..'j'."""
    reader = io_ops.LMDBReader(name="test_read_from_folder")
    queue = data_flow_ops.FIFOQueue(99, [dtypes.string], shapes=())
    key, value = reader.read(queue)

    self.evaluate(queue.enqueue([self.db_path]))
    self.evaluate(queue.close())
    for idx in range(10):
      got_key, got_value = self.evaluate([key, value])
      expected_key = compat.as_bytes(str(idx))
      expected_value = compat.as_bytes(str(chr(ord("a") + idx)))
      self.assertAllEqual(compat.as_bytes(got_key), expected_key)
      self.assertAllEqual(compat.as_bytes(got_value), expected_value)

    # Once the queue is closed and drained, any further read must fail.
    with self.assertRaisesOpError("is closed and has insufficient elements "
                                  "\\(requested 1, current size 0\\)"):
      self.evaluate([key, value])
Example #3
0
  def testReadFromFileRepeatedly(self):
    """With num_epochs=None the producer cycles the file indefinitely."""
    with self.cached_session() as sess:
      reader = io_ops.LMDBReader(name="test_read_from_file_repeated")
      filename_queue = input_lib.string_input_producer(
          [self.db_path], num_epochs=None)
      key, value = reader.read(filename_queue)

      coord = coordinator.Coordinator()
      runner_threads = queue_runner_impl.start_queue_runners(sess, coord=coord)
      # Three full passes; each pass must replay the same 10 records in order.
      for _ in range(3):
        for record_index in range(10):
          got_key, got_value = self.evaluate([key, value])
          expected_key = compat.as_bytes(str(record_index))
          expected_value = compat.as_bytes(str(chr(ord("a") + record_index)))
          self.assertAllEqual(compat.as_bytes(got_key), expected_key)
          self.assertAllEqual(compat.as_bytes(got_value), expected_value)
      coord.request_stop()
      coord.join(runner_threads)
Example #4
0
    def testReadFromFolder(self):
        """Reads ten (key, value) pairs from the bundled testdata LMDB folder."""
        with self.test_session() as sess:
            reader = io_ops.LMDBReader(name="test_read_from_folder")
            path = os.path.join("tensorflow", "core", "lib", "lmdb",
                                "testdata")
            queue = data_flow_ops.FIFOQueue(99, [dtypes.string], shapes=())
            key, value = reader.read(queue)

            queue.enqueue([path]).run()
            queue.close().run()
            for record_index in range(10):
                got_key, got_value = sess.run([key, value])
                expected_key = compat.as_bytes(str(record_index))
                expected_value = compat.as_bytes(str(chr(ord('a') + record_index)))
                self.assertAllEqual(compat.as_bytes(got_key), expected_key)
                self.assertAllEqual(compat.as_bytes(got_value), expected_value)

            # The closed, empty queue must reject one more read.
            with self.assertRaisesOpError(
                    "is closed and has insufficient elements "
                    "\\(requested 1, current size 0\\)"):
                sess.run([key, value])
Example #5
0
import os
import tensorflow as tf
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import variables
from tqdm import tqdm

# Location of the Caffe-style MNIST LMDB database on the local machine.
prefix_path = "/home/liudanny/git/caffe-demos/mnist"
batch_size = 64

with tf.Session() as sess:
    lmdb_file = os.path.join(prefix_path, "mnist_train_lmdb", "data.mdb")
    reader = io_ops.LMDBReader()
    queue = data_flow_ops.FIFOQueue(200, [dtypes.string], shapes=())
    # Batched read: each session run yields up to `batch_size` records.
    key, value = reader.read_up_to(queue, batch_size)
    queue.enqueue([lmdb_file]).run()
    queue.close().run()
    # Pull 500 batches, tracking throughput with a progress bar.
    with tqdm(total=500, leave=True, smoothing=0.2) as pbar:
        for _ in range(500):
            k, v = sess.run([key, value])
            pbar.update()