Example #1
def read_tfrecords(path, proto=None, max_records=None, compression_type=None):
  """Yields the parsed records in a TFRecord file path.

  Note that path can be a sharded filespec (path@N), in which case this
  function will read each shard in order; i.e., every record in shard 0 is
  read first, then every record in shard 1, and so on.

  Args:
    path: String. A path to a TFRecord file containing protos.
    proto: A proto class. proto.FromString() will be called on each serialized
      record in path to parse it.
    max_records: int >= 0 or None. Maximum number of records to read from path.
      If None, the default, all records will be read.
    compression_type: 'GZIP', 'ZLIB', '' (uncompressed), or None to autodetect
      based on file extension.

  Yields:
    proto.FromString() values for each record in path, in order.
  """
  if sharded_file_utils.is_sharded_file_spec(path):
    paths = sharded_file_utils.generate_sharded_filenames(path)
  else:
    paths = [path]

  i = 0
  for path in paths:
    for record in Reader(path, proto, compression_type):
      i += 1
      if max_records is not None and i > max_records:
        return
      yield record
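
A minimal usage sketch for read_tfrecords as defined above. The sharded path, the variants_pb2.Variant proto module, and the availability of the surrounding module's Reader and sharded_file_utils helpers are all assumptions made for illustration:

from third_party.nucleus.protos import variants_pb2  # assumed proto module

# Read at most 10 Variant records; shard 0 is exhausted first, then shard 1, ...
for variant in read_tfrecords('/data/variants.tfrecord@4',
                              proto=variants_pb2.Variant,
                              max_records=10):
  print(variant.reference_name, variant.start)
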
Example #2
def read_shard_sorted_tfrecords(path,
                                key,
                                proto=None,
                                max_records=None,
                                options=None):
    """Yields the parsed records in a TFRecord file path in sorted order.

  The input TFRecord file must have each shard already in sorted order when
  using the key function for comparison (but elements can be interleaved across
  shards). Under those constraints, the elements will be yielded in a global
  sorted order.

  Args:
    path: String. A path to a TFRecord-formatted file containing protos.
    key: Callable. A function that takes as input a single instance of the proto
      class and returns a value on which the comparison for sorted ordering is
      performed.
    proto: A proto class. proto.FromString() will be called on each serialized
      record in path to parse it.
    max_records: int >= 0 or None. Maximum number of records to read from path.
      If None, the default, all records will be read.
    options: A python_io.TFRecordOptions object for the reader.

  Yields:
    proto.FromString() values on each record in path in sorted order.
  """
    if proto is None:
        proto = example_pb2.Example

    if options is None:
        options = make_tfrecord_options(path)

    if sharded_file_utils.is_sharded_file_spec(path):
        paths = sharded_file_utils.generate_sharded_filenames(path)
    else:
        paths = [path]

    keyed_iterables = []
    for path in paths:
        protos = (proto.FromString(buf)
                  for buf in python_io.tf_record_iterator(path, options))
        keyed_iterables.append(((key(elem), elem) for elem in protos))

    for i, (_, value) in enumerate(heapq.merge(*keyed_iterables)):
        if max_records is not None and i >= max_records:
            return
        yield value
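
A hypothetical usage sketch for the function above, assuming each shard of the input is already sorted by an int64 feature named 'position'; the feature name and path are placeholders:

def position_key(example):
  # Assumed sort key: an int64 feature named 'position' in each tf.train.Example.
  return example.features.feature['position'].int64_list.value[0]

# Records arrive in globally non-decreasing 'position' order, provided every
# shard of the filespec is individually sorted by that key.
for example in read_shard_sorted_tfrecords('/data/examples.tfrecord@16',
                                           key=position_key,
                                           max_records=100):
  pass
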
Example #3
def read_shard_sorted_tfrecords(path,
                                key,
                                proto=None,
                                max_records=None,
                                compression_type=None):
    """Yields the parsed records in a TFRecord file path in sorted order.

  The input TFRecord file must have each shard already in sorted order when
  using the key function for comparison (but elements can be interleaved across
  shards). Under those constraints, the elements will be yielded in a global
  sorted order.

  Args:
    path: String. A path to a TFRecord-formatted file containing protos.
    key: Callable. A function that takes as input a single instance of the proto
      class and returns a value on which the comparison for sorted ordering is
      performed.
    proto: A proto class. proto.FromString() will be called on each serialized
      record in path to parse it.
    max_records: int >= 0 or None. Maximum number of records to read from path.
      If None, the default, all records will be read.
    compression_type: 'GZIP', 'ZLIB', '' (uncompressed), or None to autodetect
      based on file extension.

  Yields:
    proto.FromString() values on each record in path in sorted order.
  """
    if sharded_file_utils.is_sharded_file_spec(path):
        paths = sharded_file_utils.generate_sharded_filenames(path)
    else:
        paths = [path]

    keyed_iterables = []
    for path in paths:
        protos = Reader(path, proto,
                        compression_type=compression_type).iterate()
        keyed_iterables.append(((key(elem), elem) for elem in protos))

    for i, (_, value) in enumerate(heapq.merge(*keyed_iterables)):
        if max_records is not None and i >= max_records:
            return
        yield value
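
The globally sorted order in both variants above comes from heapq.merge, which merges iterables that are each individually sorted into one sorted stream. A small self-contained sketch of that pattern, with plain lists standing in for shard iterators:

import heapq

# Each "shard" is individually sorted by the key (here the record itself).
shard_0 = [1, 4, 9]
shard_1 = [2, 3, 10]
shard_2 = [5, 6, 7]

# Wrapping records as (key, record) tuples, as the functions above do, lets
# heapq.merge compare by key while carrying the record along.
keyed = [((x, x) for x in shard) for shard in (shard_0, shard_1, shard_2)]
assert [value for _, value in heapq.merge(*keyed)] == [1, 2, 3, 4, 5, 6, 7, 9, 10]
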
Example #4
def read_tfrecords(path, proto=None, max_records=None, options=None):
    """Yields the parsed records in a TFRecord file path.

  Note that path can be sharded filespec (path@N) in which case this function
  will read each shard in order; i.e. shard 0 will read each entry in order,
  then shard 1, ...

  Args:
    path: String. A path to a TFRecord file containing protos.
    proto: A proto class. proto.FromString() will be called on each serialized
      record in path to parse it.
    max_records: int >= 0 or None. Maximum number of records to read from path.
      If None, the default, all records will be read.
    options: A python_io.TFRecordOptions object for the reader.

  Yields:
    proto.FromString() values on each record in path in order.
  """
    if proto is None:
        proto = example_pb2.Example

    if options is None:
        options = make_tfrecord_options(path)

    if sharded_file_utils.is_sharded_file_spec(path):
        paths = sharded_file_utils.generate_sharded_filenames(path)
    else:
        paths = [path]

    i = 0
    for path in paths:
        for buf in python_io.tf_record_iterator(path, options):
            i += 1
            if max_records is not None and i > max_records:
                return
            yield proto.FromString(buf)
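
A round-trip sketch for this reader, assuming python_io refers to TensorFlow's tf.compat.v1.python_io module (as the tf_record_iterator call suggests) and that the module's make_tfrecord_options and sharded_file_utils helpers are importable:

import tensorflow as tf

# Write two tiny tf.train.Example records to an uncompressed TFRecord file.
path = '/tmp/demo.tfrecord'
with tf.compat.v1.python_io.TFRecordWriter(path) as writer:
  for i in range(2):
    example = tf.train.Example()
    example.features.feature['index'].int64_list.value.append(i)
    writer.write(example.SerializeToString())

# Read them back; tf.train.Example is the default proto when none is given.
for record in read_tfrecords(path):
  print(record.features.feature['index'].int64_list.value[0])
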
  def testGenerateShardedFilenamesManyShards(self):
    names = io.generate_sharded_filenames('/dir/foo/bar@100000')
    self.assertEqual(len(names), 100000)
    self.assertEqual(names[99999], '/dir/foo/bar-099999-of-100000')
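
  # Note: the following test receives (spec, expected) from a parameterized
  # decorator (e.g. @parameterized.parameters), which is not shown in this
  # excerpt.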
  def testGenerateShardedFilenames(self, spec, expected):
    names = io.generate_sharded_filenames(spec)
    self.assertEqual(names, expected)
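
For context on what these tests exercise, here is a hypothetical standalone sketch of the path@N expansion they assume, with shard indices zero-padded to at least five digits (matching the '-099999-of-100000' expectation above); the real implementation is io.generate_sharded_filenames in the library under test:

def generate_sharded_filenames_sketch(spec):
  # Illustrative re-implementation only; behavior inferred from the tests.
  basename, _, count = spec.rpartition('@')
  num_shards = int(count)
  width = max(5, len(str(num_shards)))
  return ['%s-%0*d-of-%0*d' % (basename, width, i, width, num_shards)
          for i in range(num_shards)]

assert generate_sharded_filenames_sketch('/dir/foo/bar@3') == [
    '/dir/foo/bar-00000-of-00003',
    '/dir/foo/bar-00001-of-00003',
    '/dir/foo/bar-00002-of-00003',
]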