def test_iterator(self):
    # Pack several heterogeneous payloads (two Series, a DataFrame column
    # source, and None) into one file, then confirm that iterator-mode
    # reading yields each one back in its original order.
    payloads = [
        self.frame['float'],
        self.frame['float'].A,
        self.frame['float'].B,
        None,
    ]
    with ensure_clean(self.path) as path:
        to_msgpack(path, *payloads)
        for idx, unpacked in enumerate(read_msgpack(path, iterator=True)):
            check_arbitrary(unpacked, payloads[idx])
def test_string_io(self):
    """Round-trip a DataFrame through msgpack bytes and a file on disk.

    Covers: ``df.to_msgpack(None)`` returning bytes, the no-argument
    form, reading from a ``BytesIO`` wrapper, the module-level
    ``to_msgpack`` helper, and reading back a file written by hand.
    """
    df = DataFrame(np.random.randn(10, 2))

    # to_msgpack(None) returns the packed bytes instead of writing a file.
    s = df.to_msgpack(None)
    result = read_msgpack(s)
    tm.assert_frame_equal(result, df)

    # The no-path form behaves identically.
    s = df.to_msgpack()
    result = read_msgpack(s)
    tm.assert_frame_equal(result, df)

    # read_msgpack also accepts a file-like object.
    s = df.to_msgpack()
    result = read_msgpack(compat.BytesIO(s))
    tm.assert_frame_equal(result, df)

    # Module-level helper with path=None returns bytes as well.
    s = to_msgpack(None, df)
    result = read_msgpack(s)
    tm.assert_frame_equal(result, df)

    # Raw bytes written to disk can be read back by path.
    with ensure_clean(self.path) as p:
        s = df.to_msgpack()
        # FIX: the original used bare open()/close(), which leaks the
        # handle if the write raises; a context manager always closes it.
        with open(p, 'wb') as fh:
            fh.write(s)
        result = read_msgpack(p)
        tm.assert_frame_equal(result, df)
def _msg_pack_job(self, property_name, filename, callback_function, files_to_invariant_on):
    """Cache ``callback_function(self)``'s result as a msgpack file under the lookup dir.

    Outside a ppg pipeline the file is built eagerly when missing and
    ``None`` is returned.  Inside a pipeline, a ``FileGeneratingJob`` is
    created, made dependent on the callback's code (FunctionInvariant)
    and on ``files_to_invariant_on``, recorded in ``self._prebuilds``,
    and returned.
    """
    out_dir = self.cache_dir / "lookup"
    out_dir.mkdir(exist_ok=True)
    cache_file = out_dir / filename
    if not ppg.util.inside_ppg():
        # FIX: the existence check previously used Path(filename), which
        # resolves relative to the current working directory, while the
        # write below targets out_dir / filename — so the cache hit/miss
        # decision inspected the wrong path. Check the real cache file.
        if not cache_file.exists():  # pragma: no branch
            df = callback_function(self)
            pandas_msgpack.to_msgpack(cache_file, df)
        return None

    def dump(output_filename):
        # Runs inside the job: compute the DataFrame and persist it.
        df = callback_function(self)
        pandas_msgpack.to_msgpack(output_filename, df)

    # NOTE(review): cache_file / property_name treats the cache *file* as
    # a directory; presumably FunctionInvariant only needs a unique job
    # name rather than a real filesystem path — confirm against ppg docs.
    j = ppg.FileGeneratingJob(cache_file, dump).depends_on(
        ppg.FunctionInvariant(cache_file / property_name, callback_function))
    for f in files_to_invariant_on:
        j.depends_on_file(f)
    self._prebuilds.append(j)
    return j
def dump(output_filename):
    # Build the DataFrame via the captured callback and persist it as
    # msgpack. Relies on closure variables (callback_function, self,
    # filename) from an enclosing scope not visible here.
    # NOTE(review): unlike the inner dump() in _msg_pack_job, this joins
    # output_filename / filename — it assumes output_filename is a
    # directory rather than the target file; confirm against the caller.
    df = callback_function(self)
    pandas_msgpack.to_msgpack(output_filename / filename, df)
def test_iterator_with_string_io(self):
    # Pack several frames into an in-memory msgpack payload and read
    # them back one at a time via iterator mode.
    frames = [DataFrame(np.random.randn(10, 2)) for _ in range(5)]
    packed = to_msgpack(None, *frames)
    for idx, unpacked in enumerate(read_msgpack(packed, iterator=True)):
        tm.assert_frame_equal(unpacked, frames[idx])
def encode_decode(self, x, compress=None, **kwargs):
    """Round-trip *x* through a msgpack file on disk and return the decoded value."""
    with ensure_clean(self.path) as tmp_path:
        to_msgpack(tmp_path, x, compress=compress, **kwargs)
        decoded = read_msgpack(tmp_path, **kwargs)
    return decoded