Example #1
def encode_to_stream(self, cols, out_stream, nested):
    data_out_stream = self.data_out_stream
    # Convert the pandas data to an Arrow record batch and serialize it
    # into the intermediate buffer via the reusable batch writer.
    self._batch_writer.write_batch(
        pandas_to_arrow(self._schema, self._timezone, self._field_types, cols))
    # Frame the payload with a varint length prefix, copy it out, and
    # reset the buffer for the next call.
    out_stream.write_var_int64(data_out_stream.size())
    out_stream.write(data_out_stream.get())
    data_out_stream._clear()
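This variant frames each Arrow payload with a varint length prefix so the reader can recover batch boundaries. A minimal standalone sketch of the same framing idea, assuming a non-negative size and a plain binary output stream (write_var_uint64 and write_length_prefixed are illustrative helpers, not part of PyFlink's API):

import pyarrow as pa

def write_var_uint64(value, out):
    # LEB128-style varint: 7 payload bits per byte, high bit means "more".
    while True:
        byte = value & 0x7F
        value >>= 7
        if value:
            out.write(bytes([byte | 0x80]))
        else:
            out.write(bytes([byte]))
            return

def write_length_prefixed(batch, out):
    # Serialize the record batch into an in-memory Arrow stream first,
    # then emit <varint length><payload> so the reader knows where to stop.
    sink = pa.BufferOutputStream()
    with pa.RecordBatchStreamWriter(sink, batch.schema) as writer:
        writer.write_batch(batch)
    payload = sink.getvalue().to_pybytes()
    write_var_uint64(len(payload), out)
    out.write(payload)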
Example #2
def encode_to_stream(self, cols, out_stream: OutputStream):
    # Point the resettable adapter at the current output stream, then
    # build a fresh writer on top of it (pa is pyarrow, imported at
    # module level).
    self._resettable_io.set_output_stream(out_stream)
    batch_writer = pa.RecordBatchStreamWriter(self._resettable_io,
                                              self._schema)
    batch_writer.write_batch(
        pandas_to_arrow(self._schema, self._timezone, self._field_types,
                        cols))
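The _resettable_io here is a file-like adapter whose target can be re-pointed between calls, so a writer built on top of it always ends up writing to the current out_stream. A minimal sketch of such an adapter, assuming pyarrow only needs the write/flush/writable parts of the file protocol (the class name and details are illustrative, not PyFlink's actual implementation):

class ResettableIO:
    # File-like shim: pyarrow writes through it, and the underlying
    # target stream can be swapped before each encode call.
    def __init__(self):
        self._stream = None

    def set_output_stream(self, stream):
        self._stream = stream

    def write(self, data):
        self._stream.write(data)
        return len(data)

    def writable(self):
        return True

    def flush(self):
        # No buffering of our own; nothing to flush.
        pass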
Example #3
def encode_to_stream(self, cols, out_stream, nested):
    data_out_stream = self.data_out_stream
    # A fresh writer per call means each payload carries its own Arrow
    # schema header (pa is pyarrow, imported at module level).
    batch_writer = pa.RecordBatchStreamWriter(self._resettable_io,
                                              self._schema)
    batch_writer.write_batch(
        pandas_to_arrow(self._schema, self._timezone, self._field_types,
                        cols))
    # Length-prefix the buffered bytes, emit them, and reset the buffer.
    out_stream.write_var_int64(data_out_stream.size())
    out_stream.write(data_out_stream.get())
    data_out_stream._clear()
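Because each call builds a fresh RecordBatchStreamWriter, every payload is a complete Arrow stream (schema header included) and can be read back on its own with pa.ipc.open_stream. A quick round-trip check, using made-up data:

import pandas as pd
import pyarrow as pa

df = pd.DataFrame({"id": [1, 2, 3], "name": ["a", "b", "c"]})
batch = pa.RecordBatch.from_pandas(df)

sink = pa.BufferOutputStream()
with pa.RecordBatchStreamWriter(sink, batch.schema) as writer:
    writer.write_batch(batch)

# The buffer now holds a self-contained Arrow stream: schema + one batch.
reader = pa.ipc.open_stream(sink.getvalue())
assert reader.read_all().to_pandas().equals(df)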
Example #4
def dump_to_stream(self, iterator, stream):
    writer = None
    try:
        for cols in iterator:
            batch = pandas_to_arrow(self._schema, self._timezone, self._field_types, cols)
            if writer is None:
                # Lazily create the writer so the schema can be taken
                # from the first converted batch.
                import pyarrow as pa
                writer = pa.RecordBatchStreamWriter(stream, batch.schema)
            writer.write_batch(batch)
    finally:
        # Always close so the end-of-stream marker gets written.
        if writer is not None:
            writer.close()
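Creating the writer lazily lets the schema come from the first converted batch, and the try/finally guarantees the stream's end-of-stream marker is written via close() even if iteration fails partway. A hypothetical reader-side counterpart (load_from_stream is an assumed name, not taken from the snippet above):

import pyarrow as pa

def load_from_stream(stream):
    # Open the Arrow stream written by dump_to_stream and yield one
    # pandas DataFrame per record batch, without materializing them all.
    reader = pa.ipc.open_stream(stream)
    for batch in reader:
        yield batch.to_pandas()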