Example #1
    def testNoDeadlockFromQueue(self):
        """Tests that reading does not block main execution threads."""
        config = config_pb2.ConfigProto(inter_op_parallelism_threads=1,
                                        intra_op_parallelism_threads=1)
        with self.session(config=config) as sess:
            thread_data_t = collections.namedtuple(
                "thread_data_t", ["thread", "queue", "output"])
            thread_data = []

            # Create different readers, each with its own queue.
            for i in range(3):
                queue = data_flow_ops.FIFOQueue(99, [dtypes.string], shapes=())
                reader = io_ops.TextLineReader()
                _, line = reader.read(queue)
                output = []
                t = threading.Thread(target=AsyncReaderTest._RunSessionAndSave,
                                     args=(sess, [line], output))
                thread_data.append(thread_data_t(t, queue, output))

            # Start all readers. They are all blocked waiting for queue entries.
            sess.run(variables.global_variables_initializer())
            for d in thread_data:
                d.thread.start()

            # Unblock the readers.
            for i, d in enumerate(reversed(thread_data)):
                fname = os.path.join(self.get_temp_dir(),
                                     "deadlock.%s.txt" % i)
                with open(fname, "wb") as f:
                    f.write(("file-%s" % i).encode())
                d.queue.enqueue_many([[fname]]).run()
                d.thread.join()
                self.assertEqual([[("file-%s" % i).encode()]], d.output)
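The _RunSessionAndSave helper is not part of this excerpt. A minimal sketch consistent with how it is called here (run the fetches on a worker thread and save the result so the main thread can assert on it) could be:

    @staticmethod
    def _RunSessionAndSave(sess, args, output):
        # Blocks in sess.run() until the reader receives a queue entry,
        # then appends the fetched values (e.g. [b"file-0"]) to the list
        # shared with the main thread.
        output.append(sess.run(args))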
Example #2
def testManagedMainErrorTwoQueues(self):
    # Tests that the supervisor correctly raises a main loop
    # error even when using multiple queues for input.
    logdir = self._test_dir("managed_main_error_two_queues")
    os.makedirs(logdir)
    data_path = self._csv_data(logdir)
    with self.assertRaisesRegexp(RuntimeError, "fail at step 3"):
        with ops.Graph().as_default():
            # Create an input pipeline that reads the file 3 times.
            filename_queue = input_lib.string_input_producer(
                [data_path], num_epochs=3)
            reader = io_ops.TextLineReader()
            _, csv = reader.read(filename_queue)
            rec = parsing_ops.decode_csv(
                csv, record_defaults=[[1], [1], [1]])
            shuff_rec = input_lib.shuffle_batch(rec, 1, 6, 4)
            sv = supervisor.Supervisor(logdir=logdir)
            with sv.managed_session("") as sess:
                for step in range(9):
                    if sv.should_stop():
                        break
                    elif step == 3:
                        raise RuntimeError("fail at step 3")
                    else:
                        sess.run(shuff_rec)
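The _csv_data helper used by this example and by Example #4 is likewise not shown. A minimal sketch, assuming it only needs to write a small file of three-integer CSV records matching record_defaults=[[1], [1], [1]]; the exact row values are made up:

    def _csv_data(self, logdir):
        # Hypothetical reconstruction: write a few "a,b,c" integer rows.
        data_path = os.path.join(logdir, "data.csv")
        with open(data_path, "w") as f:
            for i in range(3):
                f.write("%d,%d,%d\n" % (i, i + 1, i + 2))
        return data_path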
Example #3
  def _testOneEpoch(self, files):
    reader = io_ops.TextLineReader(name="test_reader")
    queue = data_flow_ops.FIFOQueue(99, [dtypes.string], shapes=())
    key, value = reader.read(queue)

    self.evaluate(queue.enqueue_many([files]))
    self.evaluate(queue.close())
    for i in range(self._num_files):
      for j in range(self._num_lines):
        k, v = self.evaluate([key, value])
        self.assertAllEqual("%s:%d" % (files[i], j + 1), compat.as_text(k))
        self.assertAllEqual(self._LineText(i, j), v)

    with self.assertRaisesOpError("is closed and has insufficient elements "
                                  "\\(requested 1, current size 0\\)"):
      k, v = self.evaluate([key, value])
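This test and Example #5 rely on _num_files, _num_lines, _CreateFiles, and _LineText from the surrounding test class. A sketch reconstructed from the assertions (keys have the form "filename:line_number", 1-based, and values are the raw line bytes); the exact line contents are an assumption:

    def _LineText(self, f, l):
        # Hypothetical contents of line l (0-based) of file f.
        return compat.as_bytes("%d: %d" % (f, l))

    def _CreateFiles(self):
        # Writes _num_files text files of _num_lines lines each.
        filenames = []
        for i in range(self._num_files):
            fn = os.path.join(self.get_temp_dir(), "text_line.%d.txt" % i)
            filenames.append(fn)
            with open(fn, "wb") as f:
                for j in range(self._num_lines):
                    f.write(self._LineText(i, j) + b"\n")
        return filenames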
Example #4
def testManagedEndOfInputOneQueue(self):
    # Tests that the supervisor finishes without an error when using
    # a fixed number of epochs, reading from a single queue.
    logdir = self._test_dir("managed_end_of_input_one_queue")
    os.makedirs(logdir)
    data_path = self._csv_data(logdir)
    with ops.Graph().as_default():
        # Create an input pipeline that reads the file 3 times.
        filename_queue = input_lib.string_input_producer(
            [data_path], num_epochs=3)
        reader = io_ops.TextLineReader()
        _, csv = reader.read(filename_queue)
        rec = parsing_ops.decode_csv(csv, record_defaults=[[1], [1], [1]])
        sv = supervisor.Supervisor(logdir=logdir)
        with sv.managed_session("") as sess:
            while not sv.should_stop():
                sess.run(rec)
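For comparison, the same clean end-of-input behavior can be obtained without a Supervisor by running the queue runners under a Coordinator and treating OutOfRangeError as the normal end-of-epochs signal. A sketch using the public TF 1.x tf.train API instead of the test-internal modules:

    import tensorflow as tf  # TF 1.x graph-mode API

    with tf.Session() as sess:
        # num_epochs creates a local variable, so both initializers run.
        sess.run([tf.global_variables_initializer(),
                  tf.local_variables_initializer()])
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        try:
            while not coord.should_stop():
                sess.run(rec)
        except tf.errors.OutOfRangeError:
            pass  # the pipeline has delivered all 3 epochs
        finally:
            coord.request_stop()
            coord.join(threads)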
Example #5
  def testSkipHeaderLines(self):
    files = self._CreateFiles()
    with self.cached_session() as sess:
      reader = io_ops.TextLineReader(skip_header_lines=1, name="test_reader")
      queue = data_flow_ops.FIFOQueue(99, [dtypes.string], shapes=())
      key, value = reader.read(queue)

      queue.enqueue_many([files]).run()
      queue.close().run()
      for i in range(self._num_files):
        for j in range(self._num_lines - 1):
          k, v = sess.run([key, value])
          self.assertAllEqual("%s:%d" % (files[i], j + 2), compat.as_text(k))
          self.assertAllEqual(self._LineText(i, j + 1), v)

      with self.assertRaisesOpError("is closed and has insufficient elements "
                                    "\\(requested 1, current size 0\\)"):
        k, v = sess.run([key, value])
Example #6
def _get_reader(self):
    return io_ops.TextLineReader(skip_header_lines=self._skip_header_lines)
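Putting the pieces together, a self-contained sketch of the pattern shared by all of these examples: build a filename queue, attach a TextLineReader, and pull (key, value) records until the queue is exhausted. The file name data.txt and its contents are made up for illustration (TF 1.x graph mode):

    import tensorflow as tf  # TF 1.x graph-mode API

    with open("data.txt", "w") as f:
        f.write("header\nline-1\nline-2\n")

    # skip_header_lines=1 drops the first line of every file it reads.
    reader = tf.TextLineReader(skip_header_lines=1)
    queue = tf.FIFOQueue(99, [tf.string], shapes=())
    key, value = reader.read(queue)

    with tf.Session() as sess:
        sess.run(queue.enqueue_many([["data.txt"]]))
        sess.run(queue.close())
        try:
            while True:
                k, v = sess.run([key, value])
                print(k, v)  # e.g. b'data.txt:2' b'line-1'
        except tf.errors.OutOfRangeError:
            pass  # queue closed and fully drained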