def testReadFileIgnoreError(self):
  """read_file failures on deleted inputs are swallowed by ignore_errors()."""

  def _write(contents, path):
    with open(path, "w") as out:
      out.write(contents)

  filenames = [
      os.path.join(self.get_temp_dir(), "file_%d.txt" % i) for i in range(5)
  ]
  # Each file's contents are its own path, so the test can predict outputs.
  for filename in filenames:
    _write(filename, filename)

  dataset = (
      dataset_ops.Dataset.from_tensor_slices(filenames)
      .map(io_ops.read_file, num_threads=2, output_buffer_size=2)
      .apply(error_ops.ignore_errors()))
  iterator = dataset.make_initializable_iterator()
  init_op = iterator.initializer
  get_next = iterator.get_next()

  with self.test_session() as sess:
    # First pass: every file is present, so every element comes through.
    sess.run(init_op)
    for filename in filenames:
      self.assertEqual(compat.as_bytes(filename), sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)

    # Remove one of the inputs; reading it will now fail.
    os.remove(filenames[0])

    # Second pass: the failed read is dropped by ignore_errors() and the
    # remaining files are still produced, in order.
    sess.run(init_op)
    for filename in filenames[1:]:
      self.assertEqual(compat.as_bytes(filename), sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
  def testReadFileIgnoreError(self):
    """read_file failures on deleted inputs are swallowed by ignore_errors()."""

    def _write(contents, path):
      with open(path, "w") as out:
        out.write(contents)

    filenames = [
        os.path.join(self.get_temp_dir(), "file_%d.txt" % i) for i in range(5)
    ]
    # Each file's contents are its own path, so the test can predict outputs.
    for filename in filenames:
      _write(filename, filename)

    dataset = (
        dataset_ops.Dataset.from_tensor_slices(filenames)
        .map(io_ops.read_file, num_parallel_calls=2)
        .prefetch(2)
        .apply(error_ops.ignore_errors()))
    iterator = dataset.make_initializable_iterator()
    init_op = iterator.initializer
    get_next = iterator.get_next()

    with self.test_session() as sess:
      # First pass: every file is present, so every element comes through.
      sess.run(init_op)
      for filename in filenames:
        self.assertEqual(compat.as_bytes(filename), sess.run(get_next))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)

      # Remove one of the inputs; reading it will now fail.
      os.remove(filenames[0])

      # Second pass: the failed read is dropped by ignore_errors() and the
      # remaining files are still produced, in order.
      sess.run(init_op)
      for filename in filenames[1:]:
        self.assertEqual(compat.as_bytes(filename), sess.run(get_next))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
# Example 3 (score: 0)
 def testCsvDataset_ignoreErrWithUnquotedQuotes(self):
   """Records with unquoted quote characters are dropped, not surfaced."""
   # Only the last record parses cleanly; the first three are malformed.
   inputs = [['1,2"3,4', 'a,b,c"d', '9,8"7,6,5', 'e,f,g']]
   record_defaults = [['']] * 3
   dataset = readers.CsvDataset(
       self._setup_files(inputs),
       record_defaults=record_defaults).apply(error_ops.ignore_errors())
   self._verify_output_or_err(dataset, [['e', 'f', 'g']])
 def testCsvDataset_ignoreErrWithUnquotedQuotes(self):
   """Records with unquoted quote characters are dropped, not surfaced."""
   # Only the last record parses cleanly; the first three are malformed.
   inputs = [['1,2"3,4', 'a,b,c"d', '9,8"7,6,5', 'e,f,g']]
   record_defaults = [['']] * 3
   filenames = self.setup_files(inputs)
   with ops.Graph().as_default() as g:
     with self.test_session(graph=g) as sess:
       dataset = readers.CsvDataset(
           filenames, record_defaults=record_defaults).apply(
               error_ops.ignore_errors())
       self._verify_output_or_err(sess, dataset, [['e', 'f', 'g']])
# Example 5 (score: 0)
    def testMapIgnoreError(self):
        """check_numerics fails on the NaN element; ignore_errors drops it."""
        components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)

        dataset = (
            dataset_ops.Dataset.from_tensor_slices(components)
            .map(lambda x: array_ops.check_numerics(x, "message"))
            .apply(error_ops.ignore_errors()))
        iterator = dataset.make_initializable_iterator()
        init_op = iterator.initializer
        get_next = iterator.get_next()

        with self.test_session() as sess:
            sess.run(init_op)
            # Every finite value passes through; the NaN element is skipped.
            for expected in [1., 2., 3., 5.]:
                self.assertEqual(expected, sess.run(get_next))
            with self.assertRaises(errors.OutOfRangeError):
                sess.run(get_next)
  def testMapIgnoreError(self):
    """check_numerics fails on the NaN element; ignore_errors drops it."""
    components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)

    checked = dataset_ops.Dataset.from_tensor_slices(components).map(
        lambda x: array_ops.check_numerics(x, "message"))
    dataset = checked.apply(error_ops.ignore_errors())
    iterator = dataset.make_initializable_iterator()
    init_op = iterator.initializer
    get_next = iterator.get_next()

    with self.test_session() as sess:
      sess.run(init_op)
      # Every finite value passes through; the NaN element is skipped.
      for expected in [1., 2., 3., 5.]:
        self.assertEqual(expected, sess.run(get_next))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
# Example 7 (score: 0)
    def ignore_errors(self):
        """Deprecated: Use `Dataset.apply(tf.contrib.data.ignore_errors())`."""
        # Backward-compatibility shim: delegate to the error_ops transformation.
        transformation = error_ops.ignore_errors()
        return self.apply(transformation)
 def _build_ds(self, components):
   """Dataset over `components` that checks numerics and ignores failures."""
   checked = dataset_ops.Dataset.from_tensor_slices(components).map(
       lambda x: array_ops.check_numerics(x, "message"))
   return checked.apply(error_ops.ignore_errors())
 def _build_ds(self, components):
   """Dataset over `components` that checks numerics and ignores failures."""
   base = dataset_ops.Dataset.from_tensor_slices(components)
   mapped = base.map(lambda x: array_ops.check_numerics(x, "message"))
   return mapped.apply(error_ops.ignore_errors())
# Example 10 (score: 0)
  def ignore_errors(self):
    """Deprecated: Use `Dataset.apply(tf.contrib.data.ignore_errors())`."""
    # Backward-compatibility shim: delegate to the error_ops transformation.
    transformation = error_ops.ignore_errors()
    return self.apply(transformation)