def _create_tfrecord_dataset(tmpdir):
  """Build a slim Dataset backed by a single TFRecord file under tmpdir."""
  # Make sure the output directory exists before the record file is written.
  if not gfile.Exists(tmpdir):
    gfile.MakeDirs(tmpdir)

  data_sources = test_utils.create_tfrecord_files(tmpdir, num_files=1)

  # Parsing spec: how each serialized Example is split into raw tensors.
  encoded_feature = parsing_ops.FixedLenFeature(
      shape=(), dtype=dtypes.string, default_value='')
  format_feature = parsing_ops.FixedLenFeature(
      shape=(), dtype=dtypes.string, default_value='jpeg')
  label_feature = parsing_ops.FixedLenFeature(
      shape=[1],
      dtype=dtypes.int64,
      default_value=array_ops.zeros([1], dtype=dtypes.int64))

  # Decoder: maps the raw tensors onto the dataset's 'image'/'label' items.
  decoder = tfexample_decoder.TFExampleDecoder(
      {
          'image/encoded': encoded_feature,
          'image/format': format_feature,
          'image/class/label': label_feature,
      },
      {
          'image': tfexample_decoder.Image(),
          'label': tfexample_decoder.Tensor('image/class/label'),
      })

  return dataset.Dataset(
      data_sources=data_sources,
      reader=io_ops.TFRecordReader,
      decoder=decoder,
      num_samples=100,
      items_to_descriptions=None)
def _create_tfrecord_dataset(tmpdir):
  """Return a slim Dataset reading TF-Examples from one TFRecord file."""
  data_sources = test_utils.create_tfrecord_files(tmpdir, num_files=1)

  # How each serialized Example is parsed into raw tensors.
  keys_to_features = {}
  keys_to_features['image/encoded'] = tf.FixedLenFeature(
      shape=(), dtype=tf.string, default_value='')
  keys_to_features['image/format'] = tf.FixedLenFeature(
      shape=(), dtype=tf.string, default_value='jpeg')
  keys_to_features['image/class/label'] = tf.FixedLenFeature(
      shape=[1], dtype=tf.int64,
      default_value=tf.zeros([1], dtype=tf.int64))

  # How the raw tensors are exposed as dataset items.
  items_to_handlers = {
      'image': slim.tfexample_decoder.Image(),
      'label': slim.tfexample_decoder.Tensor('image/class/label'),
  }

  return slim.dataset.Dataset(
      data_sources=data_sources,
      reader=tf.TFRecordReader,
      decoder=slim.tfexample_decoder.TFExampleDecoder(
          keys_to_features, items_to_handlers),
      num_samples=100,
      items_to_descriptions=None)
    def _verify_all_data_sources_read(self, shared_queue):
        """Asserts that reads from a ParallelReader touch every shard.

        Creates three TFRecord shards, performs 50 reads through a
        ParallelReader with one reader per shard, and checks that every
        shard contributed at least one record and that the per-shard
        counts sum to the total number of reads.

        Args:
            shared_queue: the queue instance shared by the parallel readers.
        """
        with self.test_session():
            tfrecord_paths = test_utils.create_tfrecord_files(
                self.get_temp_dir(), num_files=3)

        num_readers = len(tfrecord_paths)
        p_reader = slim.parallel_reader.ParallelReader(
            tf.TFRecordReader, shared_queue, num_readers=num_readers)

        data_files = slim.parallel_reader.get_data_files(tfrecord_paths)
        filename_queue = tf.train.string_input_producer(data_files)
        key, value = p_reader.read(filename_queue)

        count0 = 0
        count1 = 0
        count2 = 0

        num_reads = 50

        sv = tf.train.Supervisor(logdir=self.get_temp_dir())
        with sv.prepare_or_wait_for_session() as sess:
            sv.start_queue_runners(sess)

            for _ in range(num_reads):
                current_key, _ = sess.run([key, value])
                # Record keys embed the shard name ("i-of-3").
                if "0-of-3" in str(current_key):
                    count0 += 1
                if "1-of-3" in str(current_key):
                    count1 += 1
                if "2-of-3" in str(current_key):
                    count2 += 1

        self.assertGreater(count0, 0)
        self.assertGreater(count1, 0)
        self.assertGreater(count2, 0)
        # assertEquals is a deprecated unittest alias (removed in
        # Python 3.12); assertEqual is the supported spelling.
        self.assertEqual(count0 + count1 + count2, num_reads)
def _create_tfrecord_dataset(tmpdir):
  """Create a slim Dataset over a freshly written single-shard TFRecord."""
  if not gfile.Exists(tmpdir):
    gfile.MakeDirs(tmpdir)

  sources = test_utils.create_tfrecord_files(tmpdir, num_files=1)

  def _string_feature(default):
    # Scalar string feature with the given default value.
    return parsing_ops.FixedLenFeature(
        shape=(), dtype=dtypes.string, default_value=default)

  keys_to_features = {
      'image/encoded': _string_feature(''),
      'image/format': _string_feature('jpeg'),
      'image/class/label': parsing_ops.FixedLenFeature(
          shape=[1],
          dtype=dtypes.int64,
          default_value=array_ops.zeros([1], dtype=dtypes.int64)),
  }

  items_to_handlers = {
      'image': tfexample_decoder.Image(),
      'label': tfexample_decoder.Tensor('image/class/label'),
  }

  example_decoder = tfexample_decoder.TFExampleDecoder(
      keys_to_features, items_to_handlers)

  return dataset.Dataset(
      data_sources=sources,
      reader=io_ops.TFRecordReader,
      decoder=example_decoder,
      num_samples=100,
      items_to_descriptions=None)
  def _verify_read_up_to_out(self, shared_queue):
    """Asserts read_up_to returns every record exactly once.

    Writes 3 TFRecord shards of 7 records each, drains them with
    read_up_to in batches of at most 4 over a single epoch, and checks
    that each shard yields exactly num_records_per_file records and that
    the key and value counts agree.

    Args:
      shared_queue: the queue instance shared by the parallel readers.
    """
    with self.test_session():
      num_files = 3
      num_records_per_file = 7
      tfrecord_paths = test_utils.create_tfrecord_files(
          self.get_temp_dir(),
          num_files=num_files,
          num_records_per_file=num_records_per_file)

    p_reader = parallel_reader.ParallelReader(
        io_ops.TFRecordReader, shared_queue, num_readers=5)

    data_files = parallel_reader.get_data_files(tfrecord_paths)
    # num_epochs=1 so the filename queue raises OutOfRangeError once
    # every file has been consumed exactly once.
    filename_queue = input_lib.string_input_producer(data_files, num_epochs=1)
    key, value = p_reader.read_up_to(filename_queue, 4)

    count0 = 0
    count1 = 0
    count2 = 0
    all_keys_count = 0
    all_values_count = 0

    sv = supervisor.Supervisor(logdir=self.get_temp_dir())
    with sv.prepare_or_wait_for_session() as sess:
      sv.start_queue_runners(sess)
      while True:
        try:
          current_keys, current_values = sess.run([key, value])
          # assertEquals is a deprecated unittest alias (removed in
          # Python 3.12); assertEqual is the supported spelling.
          self.assertEqual(len(current_keys), len(current_values))
          all_keys_count += len(current_keys)
          all_values_count += len(current_values)
          for current_key in current_keys:
            # Record keys embed the shard name ("i-of-3").
            if '0-of-3' in str(current_key):
              count0 += 1
            if '1-of-3' in str(current_key):
              count1 += 1
            if '2-of-3' in str(current_key):
              count2 += 1
        except errors_impl.OutOfRangeError:
          break

    self.assertEqual(count0, num_records_per_file)
    self.assertEqual(count1, num_records_per_file)
    self.assertEqual(count2, num_records_per_file)
    self.assertEqual(
        all_keys_count,
        num_files * num_records_per_file)
    self.assertEqual(all_values_count, all_keys_count)
    self.assertEqual(
        count0 + count1 + count2,
        all_keys_count)
    def testOutOfRangeError(self):
        """Single-pass reader raises OutOfRangeError when records run out."""
        with self.test_session():
            [tfrecord_path] = test_utils.create_tfrecord_files(
                self.get_temp_dir(), num_files=1)

        key, value = slim.parallel_reader.single_pass_read(
            tfrecord_path, reader_class=tf.TFRecordReader)
        # tf.initialize_local_variables() is deprecated; use its
        # replacement tf.local_variables_initializer().
        init_op = tf.local_variables_initializer()

        with self.test_session() as sess:
            sess.run(init_op)
            with tf.contrib.slim.queues.QueueRunners(sess):
                # More reads than records in the file, so a surplus read
                # must raise (assumes the file holds < 11 records — TODO
                # confirm against test_utils.create_tfrecord_files).
                num_reads = 11
                with self.assertRaises(tf.errors.OutOfRangeError):
                    for _ in range(num_reads):
                        sess.run([key, value])
# Beispiel #7
# 0
    def _verify_read_up_to_out(self, shared_queue):
        """Asserts read_up_to returns every record exactly once per epoch.

        Writes 3 TFRecord shards of 7 records each, drains them with
        read_up_to in batches of at most 4 over a single epoch, and
        checks the per-shard and total key/value counts.

        Args:
            shared_queue: the queue instance shared by the parallel readers.
        """
        with self.test_session():
            num_files = 3
            num_records_per_file = 7
            tfrecord_paths = test_utils.create_tfrecord_files(
                self.get_temp_dir(),
                num_files=num_files,
                num_records_per_file=num_records_per_file)

        p_reader = parallel_reader.ParallelReader(io_ops.TFRecordReader,
                                                  shared_queue,
                                                  num_readers=5)

        data_files = parallel_reader.get_data_files(tfrecord_paths)
        # num_epochs=1 so the filename queue raises OutOfRangeError after
        # every file has been consumed exactly once.
        filename_queue = input_lib.string_input_producer(data_files,
                                                         num_epochs=1)
        key, value = p_reader.read_up_to(filename_queue, 4)

        count0 = 0
        count1 = 0
        count2 = 0
        all_keys_count = 0
        all_values_count = 0

        sv = supervisor.Supervisor(logdir=self.get_temp_dir())
        with sv.prepare_or_wait_for_session() as sess:
            sv.start_queue_runners(sess)
            while True:
                try:
                    current_keys, current_values = sess.run([key, value])
                    # assertEquals is a deprecated unittest alias (removed
                    # in Python 3.12); assertEqual is the supported name.
                    self.assertEqual(len(current_keys), len(current_values))
                    all_keys_count += len(current_keys)
                    all_values_count += len(current_values)
                    for current_key in current_keys:
                        # Record keys embed the shard name ("i-of-3").
                        if '0-of-3' in str(current_key):
                            count0 += 1
                        if '1-of-3' in str(current_key):
                            count1 += 1
                        if '2-of-3' in str(current_key):
                            count2 += 1
                except errors_impl.OutOfRangeError:
                    break

        self.assertEqual(count0, num_records_per_file)
        self.assertEqual(count1, num_records_per_file)
        self.assertEqual(count2, num_records_per_file)
        self.assertEqual(all_keys_count, num_files * num_records_per_file)
        self.assertEqual(all_values_count, all_keys_count)
        self.assertEqual(count0 + count1 + count2, all_keys_count)
# Beispiel #8
# 0
    def testOutOfRangeError(self):
        """Single-pass reader raises OutOfRangeError when records run out."""
        with self.test_session():
            [tfrecord_path
             ] = test_utils.create_tfrecord_files(self.get_temp_dir(),
                                                  num_files=1)

        key, value = slim.parallel_reader.single_pass_read(
            tfrecord_path, reader_class=tf.TFRecordReader)
        # tf.initialize_local_variables() is deprecated; use its
        # replacement tf.local_variables_initializer().
        init_op = tf.local_variables_initializer()

        with self.test_session() as sess:
            sess.run(init_op)
            with tf.contrib.slim.queues.QueueRunners(sess):
                # More reads than records in the file, so a surplus read
                # must raise (assumes the file holds < 11 records — TODO
                # confirm against test_utils.create_tfrecord_files).
                num_reads = 11
                with self.assertRaises(tf.errors.OutOfRangeError):
                    for _ in range(num_reads):
                        sess.run([key, value])
    def testOutOfRangeError(self):
        """Reading past the end of a single-pass reader raises."""
        with self.cached_session():
            (tfrecord_path,) = test_utils.create_tfrecord_files(
                self.get_temp_dir(), num_files=1)

        key, value = parallel_reader.single_pass_read(
            tfrecord_path, reader_class=io_ops.TFRecordReader)
        init_op = variables.local_variables_initializer()

        with self.cached_session() as sess:
            sess.run(init_op)
            with queues.QueueRunners(sess):
                # Attempt more reads than there are records; the surplus
                # read must raise OutOfRangeError.
                num_reads = 11
                with self.assertRaises(errors_impl.OutOfRangeError):
                    for _ in range(num_reads):
                        sess.run([key, value])
    def testTFRecordReader(self):
        """Single-pass read yields only keys from the created record file."""
        with self.test_session():
            [tfrecord_path] = test_utils.create_tfrecord_files(
                self.get_temp_dir(), num_files=1)

        key, value = slim.parallel_reader.single_pass_read(
            tfrecord_path, reader_class=tf.TFRecordReader)
        # tf.initialize_local_variables() is deprecated; use its
        # replacement tf.local_variables_initializer().
        init_op = tf.local_variables_initializer()

        with self.test_session() as sess:
            sess.run(init_op)
            with tf.contrib.slim.queues.QueueRunners(sess):
                flowers = 0
                num_reads = 9
                for _ in range(num_reads):
                    current_key, _ = sess.run([key, value])
                    if "flowers" in str(current_key):
                        flowers += 1
                self.assertGreater(flowers, 0)
                # assertEquals is deprecated (removed in Python 3.12).
                self.assertEqual(flowers, num_reads)
    def testTFRecordReader(self):
        """Parallel read over 3 shards yields only keys from those files."""
        with self.test_session():
            self._tfrecord_paths = test_utils.create_tfrecord_files(
                self.get_temp_dir(), num_files=3)

        key, value = slim.parallel_reader.parallel_read(
            self._tfrecord_paths, reader_class=tf.TFRecordReader, num_readers=3
        )

        sv = tf.train.Supervisor(logdir=self.get_temp_dir())
        with sv.prepare_or_wait_for_session() as sess:
            sv.start_queue_runners(sess)

            flowers = 0
            num_reads = 100
            for _ in range(num_reads):
                current_key, _ = sess.run([key, value])
                if "flowers" in str(current_key):
                    flowers += 1
            self.assertGreater(flowers, 0)
            # assertEquals is deprecated (removed in Python 3.12).
            self.assertEqual(flowers, num_reads)
# Beispiel #12
# 0
    def testTFRecordReader(self):
        """Single-pass read yields only keys from the created record file."""
        with self.test_session():
            [tfrecord_path
             ] = test_utils.create_tfrecord_files(self.get_temp_dir(),
                                                  num_files=1)

        key, value = slim.parallel_reader.single_pass_read(
            tfrecord_path, reader_class=tf.TFRecordReader)
        # tf.initialize_local_variables() is deprecated; use its
        # replacement tf.local_variables_initializer().
        init_op = tf.local_variables_initializer()

        with self.test_session() as sess:
            sess.run(init_op)
            with tf.contrib.slim.queues.QueueRunners(sess):
                flowers = 0
                num_reads = 9
                for _ in range(num_reads):
                    current_key, _ = sess.run([key, value])
                    if 'flowers' in str(current_key):
                        flowers += 1
                self.assertGreater(flowers, 0)
                # assertEquals is deprecated (removed in Python 3.12).
                self.assertEqual(flowers, num_reads)
# Beispiel #13
# 0
    def testTFRecordReader(self):
        """Parallel read over 3 shards yields only keys from those files."""
        with self.test_session():
            self._tfrecord_paths = test_utils.create_tfrecord_files(
                self.get_temp_dir(), num_files=3)

        key, value = parallel_reader.parallel_read(
            self._tfrecord_paths,
            reader_class=io_ops.TFRecordReader,
            num_readers=3)

        sv = supervisor.Supervisor(logdir=self.get_temp_dir())
        # managed_session starts the queue runners itself, so no explicit
        # start_queue_runners call is needed here.
        with sv.managed_session() as sess:

            flowers = 0
            num_reads = 100
            for _ in range(num_reads):
                current_key, _ = sess.run([key, value])
                if 'flowers' in str(current_key):
                    flowers += 1
            self.assertGreater(flowers, 0)
            # assertEquals is deprecated (removed in Python 3.12).
            self.assertEqual(flowers, num_reads)
# Beispiel #14
# 0
  def _verify_all_data_sources_read(self, shared_queue):
    """Asserts that reads from a ParallelReader touch every shard.

    Creates three TFRecord shards, performs 50 reads with one reader per
    shard, and checks that each shard contributed at least one record and
    that the per-shard counts sum to the total number of reads.

    Args:
      shared_queue: the queue instance shared by the parallel readers.
    """
    with self.test_session():
      tfrecord_paths = test_utils.create_tfrecord_files(
          self.get_temp_dir(),
          num_files=3)

    num_readers = len(tfrecord_paths)
    p_reader = slim.parallel_reader.ParallelReader(
        tf.TFRecordReader,
        shared_queue,
        num_readers=num_readers)

    data_files = slim.parallel_reader.get_data_files(
        tfrecord_paths)
    filename_queue = tf.train.string_input_producer(data_files)
    key, value = p_reader.read(filename_queue)

    count0 = 0
    count1 = 0
    count2 = 0

    num_reads = 50

    sv = tf.train.Supervisor(logdir=self.get_temp_dir())
    with sv.prepare_or_wait_for_session() as sess:
      sv.start_queue_runners(sess)

      for _ in range(num_reads):
        current_key, _ = sess.run([key, value])
        # Record keys embed the shard name ("i-of-3").
        if '0-of-3' in str(current_key):
          count0 += 1
        if '1-of-3' in str(current_key):
          count1 += 1
        if '2-of-3' in str(current_key):
          count2 += 1

    self.assertGreater(count0, 0)
    self.assertGreater(count1, 0)
    self.assertGreater(count2, 0)
    # assertEquals is a deprecated unittest alias (removed in Python 3.12).
    self.assertEqual(count0 + count1 + count2, num_reads)
# Beispiel #15
# 0
    def testTFRecordReader(self):
        """Parallel read over 3 shards yields only keys from those files."""
        with self.test_session():
            self._tfrecord_paths = test_utils.create_tfrecord_files(
                self.get_temp_dir(), num_files=3)

        key, value = slim.parallel_reader.parallel_read(
            self._tfrecord_paths,
            reader_class=tf.TFRecordReader,
            num_readers=3)

        sv = tf.train.Supervisor(logdir=self.get_temp_dir())
        with sv.prepare_or_wait_for_session() as sess:
            sv.start_queue_runners(sess)

            flowers = 0
            num_reads = 100
            for _ in range(num_reads):
                current_key, _ = sess.run([key, value])
                if 'flowers' in str(current_key):
                    flowers += 1
            self.assertGreater(flowers, 0)
            # assertEquals is deprecated (removed in Python 3.12).
            self.assertEqual(flowers, num_reads)