def test_writes_rows_is_cache_is_large(self, fake_write_rows):
    """insert_row flushes via _write_rows once the cache grows large.

    NOTE(review): the name reads like a typo for
    ``test_writes_rows_if_cache_is_large`` — kept as-is so test
    discovery/reporting stays stable.

    Args:
        fake_write_rows: mock patched over HDFWriter._write_rows
            (injected by the decorator on this method).
    """
    temp_fs = fsopendir("temp://")
    parent = MagicMock()
    writer = HDFWriter(parent, temp_fs.getsyspath("temp.h5"))
    # Pre-fill the cache to the flush threshold; `_` because the
    # loop variable is unused.
    writer.cache = [[] for _ in range(10000)]
    writer.insert_row(["row1"])
    # The oversized cache must trigger exactly one flush.
    fake_write_rows.assert_called_once_with()
def test_writes_rows_is_cache_is_large(self, fake_write_rows):
    """Verify that insert_row triggers a cache flush when the cache is full."""
    filesystem = fsopendir('temp://')
    mock_parent = MagicMock()
    hdf_writer = HDFWriter(mock_parent, filesystem.getsyspath('temp.h5'))
    # Grow the cache past the flush threshold before inserting.
    hdf_writer.cache = [[] for i in range(10000)]
    hdf_writer.insert_row(['row1'])
    fake_write_rows.assert_called_once_with()
def test_writes_cached_rows(self):
    """_write_rows drains the cache into the HDF partition table."""
    temp_fs = fsopendir("temp://")
    parent = MagicMock()
    writer = HDFWriter(parent, temp_fs.getsyspath("temp.h5"))
    # add two columns so the table schema matches the cached rows
    writer.meta["schema"].append(self._get_column("col1", "int"))
    writer.meta["schema"].append(self._get_column("col2", "str"))
    writer.cache = [[1, "row1"], [2, "row2"]]
    writer._write_rows()
    # Flushing must empty the cache.
    self.assertEqual(writer.cache, [])
    # Rows are written to the partition table; hoist the attribute
    # chain and read both columns in a single pass instead of
    # re-iterating the table once per column.
    table = writer._h5_file.root.partition.rows
    self.assertEqual(table.nrows, 2)
    rows = [(x["col1"], x["col2"]) for x in table.iterrows()]
    self.assertEqual([col1 for col1, _ in rows], [1, 2])
    # str values come back as bytes from HDF5, hence b(...).
    self.assertEqual([col2 for _, col2 in rows], [b("row1"), b("row2")])
def test_writes_cached_rows(self):
    """Check that _write_rows persists cached rows and clears the cache."""
    fs_tmp = fsopendir('temp://')
    mock_parent = MagicMock()
    hdf_writer = HDFWriter(mock_parent, fs_tmp.getsyspath('temp.h5'))
    # add two columns
    hdf_writer.meta['schema'].append(self._get_column('col1', 'int'))
    hdf_writer.meta['schema'].append(self._get_column('col2', 'str'))
    hdf_writer.cache = [[1, 'row1'], [2, 'row2']]
    hdf_writer._write_rows()
    self.assertEqual(hdf_writer.cache, [])
    # rows are written
    rows_table = hdf_writer._h5_file.root.partition.rows
    self.assertEqual(rows_table.nrows, 2)
    self.assertEqual(
        [row['col1'] for row in rows_table.iterrows()],
        [1, 2])
    self.assertEqual(
        [row['col2'] for row in rows_table.iterrows()],
        [b('row1'), b('row2')])