def _writeFileHeader(self):
    """Write the SequenceFile header to the output stream.

    Layout (in order): version bytes, key class name, value class name,
    record-compression flag, block-compression flag, codec class name
    (only when a codec is set), file metadata, and the sync marker.
    """
    self._stream.write(VERSION)
    Text.writeString(self._stream, self.getKeyClassName())
    Text.writeString(self._stream, self.getValueClassName())
    self._stream.writeBoolean(self._compress)
    self._stream.writeBoolean(self._block_compress)
    if self._codec:
        # BUG FIX: the codec class name was hard-coded to
        # 'org.apache.hadoop.io.compress.DefaultCodec', which writes a wrong
        # header for any other codec. Record the actual codec's Hadoop class
        # name instead, matching the other _writeFileHeader implementations.
        Text.writeString(self._stream, hadoopClassName(self._codec.__class__))
    self._metadata.write(self._stream)
    self._stream.write(self._sync)
def _writeFileHeader(self):
    """Emit the SequenceFile header: version, key/value class names,
    compression flags, optional codec class, metadata, and sync marker."""
    stream = self._stream
    stream.write(VERSION)
    # Key and value class names, in that order.
    Text.writeString(stream, self.getKeyClassName())
    Text.writeString(stream, self.getValueClassName())
    # Compression flags: per-record, then per-block.
    stream.writeBoolean(self._compress)
    stream.writeBoolean(self._block_compress)
    codec = self._codec
    if codec:
        # Readers use this class name to pick the matching decompressor.
        Text.writeString(stream, hadoopClassName(codec.__class__))
    self._metadata.write(stream)
    stream.write(self._sync)
def _writeFileHeader(self):
    """Write the file header in SequenceFile order.

    Sequence: version marker, key class name, value class name, the two
    compression booleans, the codec class name when compression uses a
    codec, the metadata block, and finally the sync marker.
    """
    self._stream.write(VERSION)
    for class_name in (self.getKeyClassName(), self.getValueClassName()):
        Text.writeString(self._stream, class_name)
    for flag in (self._compress, self._block_compress):
        self._stream.writeBoolean(flag)
    if self._codec:
        # Name of the codec class so readers can decompress the payload.
        Text.writeString(self._stream, hadoopClassName(self._codec.__class__))
    self._metadata.write(self._stream)
    self._stream.write(self._sync)
def write(self, data_output):
    """Serialize this metadata map to *data_output*.

    Writes the entry count as an int, then each key/value pair as two
    length-prefixed Text strings.

    :param data_output: a DataOutput-like stream with a ``writeInt`` method.
    """
    data_output.writeInt(len(self._meta))
    # FIX: iteritems() is Python-2-only; items() iterates the same pairs in
    # the same order on both Python 2 and 3, so the written bytes are
    # unchanged while the code becomes Python-3 compatible.
    for key, value in self._meta.items():
        Text.writeString(data_output, key)
        Text.writeString(data_output, value)