Ejemplo n.º 1
0
 def _as_variant_tensor(self):
   """Builds the variant tensor backing this dataset.

   Selects the V2 kernel (which accepts a compression-type argument)
   when a compression type is configured or the 2018-11-30 forward
   compatibility window has opened; otherwise falls back to the V1
   kernel, which takes no compression argument.
   """
   # Arguments shared by both op versions, in the order the ops expect.
   common_args = (self._filenames, self._header_bytes, self._record_bytes,
                  self._footer_bytes, self._buffer_size)
   use_v2 = (self._compression_type is not None
             or compat.forward_compatible(2018, 11, 30))
   if use_v2:
     return gen_dataset_ops.fixed_length_record_dataset_v2(
         *common_args, self._compression_type)
   return gen_dataset_ops.fixed_length_record_dataset(*common_args)
Ejemplo n.º 2
0
 def _as_variant_tensor(self):
     """Creates the underlying variant tensor for this dataset.

     Dispatches to ``fixed_length_record_dataset_v2`` when compression
     is configured or the 2018-11-30 forward-compatibility horizon has
     passed; otherwise uses the original op, which does not take a
     compression argument.
     """
     # Guard clause: only the legacy path lacks the compression arg.
     if (self._compression_type is None
             and not compat.forward_compatible(2018, 11, 30)):
         return gen_dataset_ops.fixed_length_record_dataset(
             self._filenames, self._header_bytes, self._record_bytes,
             self._footer_bytes, self._buffer_size)
     return gen_dataset_ops.fixed_length_record_dataset_v2(
         self._filenames, self._header_bytes, self._record_bytes,
         self._footer_bytes, self._buffer_size, self._compression_type)
Ejemplo n.º 3
0
    def __init__(self,
                 filenames,
                 record_bytes,
                 header_bytes=None,
                 footer_bytes=None,
                 buffer_size=None,
                 compression_type=None,
                 name=None):
        """Creates a `FixedLengthRecordDataset`.

        Args:
          filenames: A `tf.string` tensor containing one or more filenames.
          record_bytes: A `tf.int64` scalar representing the number of bytes
            in each record.
          header_bytes: (Optional.) A `tf.int64` scalar representing the
            number of bytes to skip at the start of a file.
          footer_bytes: (Optional.) A `tf.int64` scalar representing the
            number of bytes to ignore at the end of a file.
          buffer_size: (Optional.) A `tf.int64` scalar representing the
            number of bytes to buffer when reading.
          compression_type: (Optional.) A `tf.string` scalar evaluating to
            one of `""` (no compression), `"ZLIB"`, or `"GZIP"`.
          name: (Optional.) A name for the tf.data operation.
        """
        self._filenames = filenames
        self._record_bytes = ops.convert_to_tensor(
            record_bytes, dtype=dtypes.int64, name="record_bytes")

        # Optional scalars are normalized to tensors; unset values take the
        # helper's defaults (buffer_size additionally falls back to the
        # module-level default reader buffer size).
        self._header_bytes = convert.optional_param_to_tensor(
            "header_bytes", header_bytes)
        self._footer_bytes = convert.optional_param_to_tensor(
            "footer_bytes", footer_bytes)
        self._buffer_size = convert.optional_param_to_tensor(
            "buffer_size", buffer_size, _DEFAULT_READER_BUFFER_SIZE_BYTES)
        self._compression_type = convert.optional_param_to_tensor(
            "compression_type", compression_type,
            argument_default="", argument_dtype=dtypes.string)

        # Record the user-supplied op name (if any) in the dataset metadata.
        self._metadata = dataset_metadata_pb2.Metadata()
        if name:
            self._metadata.name = dataset_ops._validate_and_encode(name)
        serialized_metadata = self._metadata.SerializeToString()

        variant_tensor = gen_dataset_ops.fixed_length_record_dataset_v2(
            self._filenames,
            self._header_bytes,
            self._record_bytes,
            self._footer_bytes,
            self._buffer_size,
            self._compression_type,
            metadata=serialized_metadata)
        super(_FixedLengthRecordDataset, self).__init__(variant_tensor)
Ejemplo n.º 4
0
    def __init__(self,
                 filenames,
                 record_bytes,
                 header_bytes=None,
                 footer_bytes=None,
                 buffer_size=None,
                 compression_type=None):
        """Creates a `FixedLengthRecordDataset`.

        Args:
          filenames: A `tf.string` tensor containing one or more filenames.
          record_bytes: A `tf.int64` scalar representing the number of bytes
            in each record.
          header_bytes: (Optional.) A `tf.int64` scalar representing the
            number of bytes to skip at the start of a file.
          footer_bytes: (Optional.) A `tf.int64` scalar representing the
            number of bytes to ignore at the end of a file.
          buffer_size: (Optional.) A `tf.int64` scalar representing the
            number of bytes to buffer when reading.
          compression_type: (Optional.) A `tf.string` scalar evaluating to
            one of `""` (no compression), `"ZLIB"`, or `"GZIP"`.
        """
        self._filenames = ops.convert_to_tensor(
            filenames, dtype=dtypes.string, name="filenames")
        self._record_bytes = ops.convert_to_tensor(
            record_bytes, dtype=dtypes.int64, name="record_bytes")

        self._header_bytes = convert.optional_param_to_tensor(
            "header_bytes", header_bytes)
        self._footer_bytes = convert.optional_param_to_tensor(
            "footer_bytes", footer_bytes)
        self._buffer_size = convert.optional_param_to_tensor(
            "buffer_size", buffer_size, _DEFAULT_READER_BUFFER_SIZE_BYTES)
        self._compression_type = convert.optional_param_to_tensor(
            "compression_type", compression_type,
            argument_default="", argument_dtype=dtypes.string)

        # NOTE(review): self._compression_type was just produced by
        # optional_param_to_tensor above, so it looks never-None here and
        # the V2 branch appears to always win — confirm against the
        # helper's contract before simplifying.
        shared_args = (self._filenames, self._header_bytes,
                       self._record_bytes, self._footer_bytes,
                       self._buffer_size)
        if (self._compression_type is not None
                or compat.forward_compatible(2018, 11, 30)):
            variant_tensor = gen_dataset_ops.fixed_length_record_dataset_v2(
                *shared_args, self._compression_type)
        else:
            variant_tensor = gen_dataset_ops.fixed_length_record_dataset(
                *shared_args)
        super(FixedLengthRecordDatasetV2, self).__init__(variant_tensor)
Ejemplo n.º 5
0
  def __init__(self,
               filenames,
               record_bytes,
               header_bytes=None,
               footer_bytes=None,
               buffer_size=None,
               compression_type=None):
    """Creates a `FixedLengthRecordDataset`.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
      record_bytes: A `tf.int64` scalar representing the number of bytes in
        each record.
      header_bytes: (Optional.) A `tf.int64` scalar representing the number of
        bytes to skip at the start of a file.
      footer_bytes: (Optional.) A `tf.int64` scalar representing the number of
        bytes to ignore at the end of a file.
      buffer_size: (Optional.) A `tf.int64` scalar representing the number of
        bytes to buffer when reading.
      compression_type: (Optional.) A `tf.string` scalar evaluating to one of
        `""` (no compression), `"ZLIB"`, or `"GZIP"`.
    """
    self._filenames = ops.convert_to_tensor(
        filenames, dtype=dtypes.string, name="filenames")
    self._record_bytes = ops.convert_to_tensor(
        record_bytes, dtype=dtypes.int64, name="record_bytes")

    # Normalize the optional scalars; buffer_size falls back to the
    # module-level default when unspecified.
    self._header_bytes = convert.optional_param_to_tensor(
        "header_bytes", header_bytes)
    self._footer_bytes = convert.optional_param_to_tensor(
        "footer_bytes", footer_bytes)
    self._buffer_size = convert.optional_param_to_tensor(
        "buffer_size", buffer_size, _DEFAULT_READER_BUFFER_SIZE_BYTES)
    self._compression_type = convert.optional_param_to_tensor(
        "compression_type",
        compression_type,
        argument_default="",
        argument_dtype=dtypes.string)

    # Legacy op only when no compression is set AND the 2018-11-30
    # forward-compatibility window has not yet opened.
    use_legacy_op = (self._compression_type is None and
                     not compat.forward_compatible(2018, 11, 30))
    if use_legacy_op:
      variant_tensor = gen_dataset_ops.fixed_length_record_dataset(
          self._filenames, self._header_bytes, self._record_bytes,
          self._footer_bytes, self._buffer_size)
    else:
      variant_tensor = gen_dataset_ops.fixed_length_record_dataset_v2(
          self._filenames, self._header_bytes, self._record_bytes,
          self._footer_bytes, self._buffer_size, self._compression_type)
    super(FixedLengthRecordDatasetV2, self).__init__(variant_tensor)