Example #1
File: test_hdf.py Project: pylhc/tfs
    def test_read_write(self, tmp_path: Path, df_example: TfsDataFrame):
        """Basic read-write loop test for TfsDataFrames to hdf5 format."""
        out_file = tmp_path / "data_frame.h5"
        write_hdf(out_file, df_example)

        assert out_file.is_file()

        df_read = read_hdf(out_file)
        assert_tfs_frame_equal(df_example, df_read)
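
These tests rely on a df_example fixture that is defined elsewhere in the test suite and not shown in this listing. A minimal sketch of what such a fixture might look like (the contents, column names, and header values here are assumptions, not the project's actual fixture):

import numpy as np
import pytest

from tfs import TfsDataFrame


@pytest.fixture
def df_example() -> TfsDataFrame:
    """Hypothetical fixture: a small TfsDataFrame with headers and data."""
    return TfsDataFrame(
        data=np.arange(6.0).reshape(3, 2),
        columns=["A", "B"],
        headers={"TITLE": "Example", "VALUE": 42},
    )
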
Example #2
File: test_hdf.py Project: pylhc/tfs
    def test_write_empty_frame(self, tmp_path: Path):
        """Test writing an empty TfsDataFrame."""
        df_example = TfsDataFrame()
        out_file = tmp_path / "data_frame.h5"
        write_hdf(out_file, df_example)

        assert out_file.is_file()

        df_read = read_hdf(out_file)
        assert_tfs_frame_equal(df_example, df_read)
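
Outside of pytest, the same write/read round trip can be sketched as plain usage. The import path tfs.hdf is inferred from the monkeypatch target in Example #4 below; the frame contents and file name are arbitrary:

from pathlib import Path

from tfs import TfsDataFrame
from tfs.hdf import read_hdf, write_hdf  # assumed import path, see the monkeypatch target in Example #4

df = TfsDataFrame(data=[[1.0, 2.0]], columns=["A", "B"], headers={"TITLE": "Demo"})

out_file = Path("data_frame.h5")
write_hdf(out_file, df)       # writes headers and data to HDF5
df_back = read_hdf(out_file)  # reads them back into a TfsDataFrame
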
Example #3
File: test_hdf.py Project: pylhc/tfs
    def test_write_empty_data(self, tmp_path: Path, df_example: TfsDataFrame):
        """Test writing a TfsDataFrame with empty data."""
        df_example = TfsDataFrame(headers=df_example.headers)
        out_file = tmp_path / "data_frame.h5"
        write_hdf(out_file, df_example)

        assert out_file.is_file()

        df_read = read_hdf(out_file)
        assert_tfs_frame_equal(df_example, df_read)
Example #4
File: test_hdf.py Project: pylhc/tfs
    def test_tables_import_fail(self, tmp_path: Path, df_example: TfsDataFrame,
                                monkeypatch):
        """Test that a clear ImportError is raised when the optional ``tables`` package is unavailable."""
        out_file = tmp_path / "data_frame.h5"
        monkeypatch.setattr('tfs.hdf.tables', None)
        with pytest.raises(ImportError) as e:
            write_hdf(out_file, df_example)
        assert 'tables' in str(e)

        with pytest.raises(ImportError) as e:
            read_hdf(out_file)
        assert 'tables' in str(e)
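
The monkeypatch sets the module-level tables reference in tfs.hdf to None, so the test presumably exercises an optional-dependency guard along these lines (a sketch of the assumed pattern, not the module's actual code):

# Optional-dependency pattern assumed by the monkeypatch above:
# 'tables' is imported once at module import time and checked before any HDF5 I/O.
try:
    import tables
except ImportError:
    tables = None


def _check_tables_installed():
    """Hypothetical helper: raise a clear ImportError when 'tables' is missing."""
    if tables is None:
        raise ImportError("The 'tables' (PyTables) package is required for HDF5 support.")
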
Example #5
File: test_hdf.py Project: pylhc/tfs
    def test_write_compression(self, tmp_path: Path):
        """Test that compression works and compressed files are readable."""
        n = 1000
        df_example = TfsDataFrame(
            data=np.zeros([n, n]),  # highly compressible data
            headers={"Random": "Data"})

        out_file = tmp_path / "data_frame.h5"
        write_hdf(out_file, df_example, complevel=0)
        assert out_file.is_file()

        out_file_compressed = tmp_path / "data_frame_comp.h5"
        write_hdf(out_file_compressed, df_example, complevel=9)
        assert out_file_compressed.is_file()

        assert out_file.stat().st_size > out_file_compressed.stat().st_size

        df_read = read_hdf(out_file)
        assert_tfs_frame_equal(df_example, df_read)

        df_read_compressed = read_hdf(out_file_compressed)
        assert_tfs_frame_equal(df_example, df_read_compressed)
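
The complevel argument is presumably forwarded to the underlying pandas/PyTables HDF5 writer, where 0 disables compression and 9 is the strongest level. Under that assumption, regular usage looks like this (import path and file name are assumptions):

from pathlib import Path

import numpy as np

from tfs import TfsDataFrame
from tfs.hdf import write_hdf  # assumed import path

df = TfsDataFrame(data=np.zeros([1000, 1000]), headers={"Random": "Data"})

# complevel=9 trades write speed for a smaller file; complevel=0 turns compression off
write_hdf(Path("data_frame_comp.h5"), df, complevel=9)
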
Example #6
File: test_hdf.py Project: pylhc/tfs
    def test_write_key_and_mode(self, tmp_path: Path, df_example: TfsDataFrame,
                                caplog):
        """Test the functionality/error handling of the kwars ``key`` and ``mode`` in ``write_hdf``"""
        out_file = tmp_path / "data_frame.h5"
        with pytest.raises(AttributeError) as e:
            write_hdf(out_file, df_example, key="something")
        assert 'key' in str(e)

        write_hdf(out_file, df_example, mode='a')  # creates file
        assert "mode" in caplog.text
        assert out_file.is_file()

        with pytest.raises(AttributeError) as e:
            write_hdf(out_file, df_example,
                      mode='a')  # tries to append to file
        assert 'mode' in str(e)