Code example #1
def test_write_to_any_location(temp_h5_file):

    loc = "path/path_1"

    class ContainerA(Container):

        a = Field(0, "some integer field")

    a = ContainerA()
    a.a = 1

    with HDF5TableWriter(temp_h5_file, group_name=loc + "/group_1") as h5:

        for _ in range(5):

            h5.write("table", a)
            h5.write("deeper/table2", a)

    with HDF5TableReader(temp_h5_file) as h5:

        for a in h5.read("/" + loc + "/group_1/table", ContainerA()):

            assert a.a == 1

    with HDF5TableReader(temp_h5_file) as h5:

        for a in h5.read("/" + loc + "/group_1/deeper/table2", ContainerA()):

            assert a.a == 1
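
The snippets on this page are shown without their import blocks. As a hedged sketch (module paths follow the usual ctapipe layout, but the exact location of the container classes differs between ctapipe releases), the names used throughout would be covered by imports along these lines:

import tempfile

import numpy as np
import pandas as pd
import astropy.units as u
from astropy.time import Time

from ctapipe.core import Container, Field
from ctapipe.io import HDF5TableReader, HDF5TableWriter
# Container classes such as HillasParametersContainer, LeakageContainer,
# R0CameraContainer, SimulatedShowerContainer (and MCEventContainer in older
# releases) live in ctapipe.containers or, in older releases, ctapipe.io.containers.
from ctapipe.containers import HillasParametersContainer, LeakageContainer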
Code example #2
def test_read_multiple_containers():
    hillas_parameter_container = HillasParametersContainer(x=1 * u.m,
                                                           y=1 * u.m,
                                                           length=1 * u.m,
                                                           width=1 * u.m)

    leakage_container = LeakageContainer(
        pixels_width_1=0.1,
        pixels_width_2=0.1,
        intensity_width_1=0.1,
        intensity_width_2=0.1,
    )
    with tempfile.NamedTemporaryFile() as f:
        with HDF5TableWriter(f.name, group_name="dl1",
                             add_prefix=True) as writer:
            writer.write("params",
                         [hillas_parameter_container, leakage_container])

        df = pd.read_hdf(f.name, key="/dl1/params")
        assert "hillas_x" in df.columns
        assert "leakage_pixels_width_1" in df.columns

        # test reading both containers separately
        with HDF5TableReader(f.name) as reader:
            generator = reader.read("/dl1/params",
                                    HillasParametersContainer(),
                                    prefixes=True)
            hillas = next(generator)
        for value, read_value in zip(
                hillas_parameter_container.as_dict().values(),
                hillas.as_dict().values()):
            np.testing.assert_equal(value, read_value)

        with HDF5TableReader(f.name) as reader:
            generator = reader.read("/dl1/params",
                                    LeakageContainer(),
                                    prefixes=True)
            leakage = next(generator)
        for value, read_value in zip(leakage_container.as_dict().values(),
                                     leakage.as_dict().values()):
            np.testing.assert_equal(value, read_value)

        # test reading both containers simultaneously
        with HDF5TableReader(f.name) as reader:
            generator = reader.read(
                "/dl1/params",
                [HillasParametersContainer(),
                 LeakageContainer()],
                prefixes=True,
            )
            hillas_, leakage_ = next(generator)

        for value, read_value in zip(leakage_container.as_dict().values(),
                                     leakage_.as_dict().values()):
            np.testing.assert_equal(value, read_value)

        for value, read_value in zip(
                hillas_parameter_container.as_dict().values(),
                hillas_.as_dict().values()):
            np.testing.assert_equal(value, read_value)
Code example #3
def test_read_without_prefixes(tmp_path):
    path = tmp_path / "test.h5"

    hillas_parameter_container = HillasParametersContainer(fov_lon=1 * u.deg,
                                                           fov_lat=1 * u.deg,
                                                           length=1 * u.deg,
                                                           width=1 * u.deg)

    leakage_container = LeakageContainer(
        pixels_width_1=0.1,
        pixels_width_2=0.1,
        intensity_width_1=0.1,
        intensity_width_2=0.1,
    )

    with HDF5TableWriter(path, group_name="dl1", add_prefix=False) as writer:
        writer.write("params", [hillas_parameter_container, leakage_container])

    df = pd.read_hdf(path, key="/dl1/params")
    assert "fov_lon" in df.columns
    assert "pixels_width_1" in df.columns

    # call with prefixes=False
    with HDF5TableReader(path) as reader:
        generator = reader.read(
            "/dl1/params",
            [HillasParametersContainer(),
             LeakageContainer()],
            prefixes=False,
        )
        hillas_, leakage_ = next(generator)

    for value, read_value in zip(leakage_container.as_dict().values(),
                                 leakage_.as_dict().values()):
        np.testing.assert_equal(value, read_value)

    for value, read_value in zip(hillas_parameter_container.as_dict().values(),
                                 hillas_.as_dict().values()):
        np.testing.assert_equal(value, read_value)

    # call with manually removed prefixes
    with HDF5TableReader(path) as reader:
        generator = reader.read(
            "/dl1/params",
            [
                HillasParametersContainer(prefix=""),
                LeakageContainer(prefix="")
            ],
            prefixes=True,
        )
        hillas_, leakage_ = next(generator)

    for value, read_value in zip(leakage_container.as_dict().values(),
                                 leakage_.as_dict().values()):
        np.testing.assert_equal(value, read_value)

    for value, read_value in zip(hillas_parameter_container.as_dict().values(),
                                 hillas_.as_dict().values()):
        np.testing.assert_equal(value, read_value)
Code example #4
def test_read_multiple_containers(tmp_path):
    path = tmp_path / "test_append.h5"
    hillas_parameter_container = HillasParametersContainer(fov_lon=1 * u.deg,
                                                           fov_lat=1 * u.deg,
                                                           length=1 * u.deg,
                                                           width=1 * u.deg)

    leakage_container = LeakageContainer(
        pixels_width_1=0.1,
        pixels_width_2=0.1,
        intensity_width_1=0.1,
        intensity_width_2=0.1,
    )
    with HDF5TableWriter(path, group_name="dl1", add_prefix=True) as writer:
        writer.write("params", [hillas_parameter_container, leakage_container])

    df = pd.read_hdf(path, key="/dl1/params")
    assert "hillas_fov_lon" in df.columns
    assert "leakage_pixels_width_1" in df.columns

    # test reading both containers separately
    with HDF5TableReader(path) as reader:
        generator = reader.read("/dl1/params",
                                HillasParametersContainer(),
                                prefixes=True)
        hillas = next(generator)
    for value, read_value in zip(hillas_parameter_container.as_dict().values(),
                                 hillas.as_dict().values()):
        np.testing.assert_equal(value, read_value)

    with HDF5TableReader(path) as reader:
        generator = reader.read("/dl1/params",
                                LeakageContainer(),
                                prefixes=True)
        leakage = next(generator)
    for value, read_value in zip(leakage_container.as_dict().values(),
                                 leakage.as_dict().values()):
        np.testing.assert_equal(value, read_value)

    # test reading both containers simultaneously
    with HDF5TableReader(path) as reader:
        generator = reader.read(
            "/dl1/params",
            [HillasParametersContainer(),
             LeakageContainer()],
            prefixes=True,
        )
        hillas_, leakage_ = next(generator)

    for value, read_value in zip(leakage_container.as_dict().values(),
                                 leakage_.as_dict().values()):
        np.testing.assert_equal(value, read_value)

    for value, read_value in zip(hillas_parameter_container.as_dict().values(),
                                 hillas_.as_dict().values()):
        np.testing.assert_equal(value, read_value)
Code example #5
def test_read_without_prefixes():
    hillas_parameter_container = HillasParametersContainer(
        x=1 * u.m, y=1 * u.m, length=1 * u.m, width=1 * u.m
    )

    leakage_container = LeakageContainer(
        pixels_width_1=0.1,
        pixels_width_2=0.1,
        intensity_width_1=0.1,
        intensity_width_2=0.1,
    )
    with tempfile.NamedTemporaryFile() as f:
        with HDF5TableWriter(f.name, group_name="dl1", add_prefix=False) as writer:
            writer.write("params", [hillas_parameter_container, leakage_container])

        df = pd.read_hdf(f.name, key="/dl1/params")
        assert "x" in df.columns
        assert "pixels_width_1" in df.columns

        # call with prefixes=False
        with HDF5TableReader(f.name) as reader:
            generator = reader.read(
                "/dl1/params",
                [HillasParametersContainer(), LeakageContainer()],
                prefixes=False,
            )
            hillas_, leakage_ = next(generator)

        for value, read_value in zip(
            leakage_container.as_dict().values(), leakage_.as_dict().values()
        ):
            np.testing.assert_equal(value, read_value)

        for value, read_value in zip(
            hillas_parameter_container.as_dict().values(), hillas_.as_dict().values()
        ):
            np.testing.assert_equal(value, read_value)

        # call with manually removed prefixes
        with HDF5TableReader(f.name) as reader:
            generator = reader.read(
                "/dl1/params",
                [HillasParametersContainer(prefix=""), LeakageContainer(prefix="")],
                prefixes=True,
            )
            hillas_, leakage_ = next(generator)

        for value, read_value in zip(
            leakage_container.as_dict().values(), leakage_.as_dict().values()
        ):
            np.testing.assert_equal(value, read_value)

        for value, read_value in zip(
            hillas_parameter_container.as_dict().values(), hillas_.as_dict().values()
        ):
            np.testing.assert_equal(value, read_value)
Code example #6
File: test_hdf5.py (project: ssteinmassl/ctapipe)
def test_column_transforms(tmp_path):
    """ ensure a user-added column transform is applied """
    tmp_file = tmp_path / "test_column_transforms.hdf5"

    class SomeContainer(Container):
        value = Field(-1, "some value that should be transformed")

    cont = SomeContainer()

    def my_transform(x):
        """ makes a length-3 array from x"""
        return np.ones(3) * x

    with HDF5TableWriter(tmp_file, group_name="data") as writer:
        # add user generated transform for the "value" column
        cont.value = 6.0
        writer.add_column_transform("mytable", "value", my_transform)
        writer.write("mytable", cont)

    # check that we get a length-3 array when reading back
    with HDF5TableReader(tmp_file, mode="r") as reader:
        for data in reader.read("/data/mytable", SomeContainer()):
            print(data)
            assert data.value.shape == (3, )
            assert np.allclose(data.value, [6.0, 6.0, 6.0])
Code example #7
File: test_hdf5.py (project: ssteinmassl/ctapipe)
def test_read_container(temp_h5_file):
    r0tel1 = R0CameraContainer()
    r0tel2 = R0CameraContainer()
    mc = MCEventContainer()

    with HDF5TableReader(temp_h5_file) as reader:

        # get the generators for each table
        mctab = reader.read("/R0/MC", mc)
        r0tab1 = reader.read("/R0/tel_001", r0tel1)
        r0tab2 = reader.read("/R0/tel_002", r0tel2)

        # read all 3 tables in sync
        for ii in range(3):

            m = next(mctab)
            r0_1 = next(r0tab1)
            r0_2 = next(r0tab2)

            print("MC:", m)
            print("t0:", r0_1.waveform)
            print("t1:", r0_2.waveform)
            print("---------------------------")

        assert "test_attribute" in r0_1.meta
        assert r0_1.meta["date"] == "2020-10-10"
Code example #8
def test_append_mode(temp_h5_file):
    class ContainerA(Container):

        a = Field(0, "some integer field")

    a = ContainerA()
    a.a = 1

    # First open with 'w' mode to clear the file and add a Container
    with HDF5TableWriter(temp_h5_file, 'group') as h5:

        h5.write('table_1', a)

    # Try to append A again
    with HDF5TableWriter(temp_h5_file, 'group', mode='a') as h5:

        h5.write('table_2', a)

    # Check if file has two tables with a = 1
    with HDF5TableReader(temp_h5_file) as h5:

        for a in h5.read('/group/table_1', ContainerA()):

            assert a.a == 1

        for a in h5.read('/group/table_2', ContainerA()):

            assert a.a == 1
Code example #9
File: test_hdf5.py (project: HealthyPear/ctapipe)
def test_read_duplicated_container_types(tmp_path):
    path = tmp_path / "test.h5"

    hillas_config_1 = HillasParametersContainer(
        x=1 * u.m, y=2 * u.m, length=3 * u.m, width=4 * u.m, prefix="hillas_1"
    )
    hillas_config_2 = HillasParametersContainer(
        x=2 * u.m, y=3 * u.m, length=4 * u.m, width=5 * u.m, prefix="hillas_2"
    )

    with HDF5TableWriter(path, group_name="dl1", add_prefix=True) as writer:
        writer.write("params", [hillas_config_1, hillas_config_2])

    df = pd.read_hdf(path, key="/dl1/params")
    assert "hillas_1_x" in df.columns
    assert "hillas_2_x" in df.columns

    with HDF5TableReader(path) as reader:
        generator = reader.read(
            "/dl1/params",
            [HillasParametersContainer(), HillasParametersContainer()],
            prefixes=["hillas_1", "hillas_2"],
        )
        hillas_1, hillas_2 = next(generator)

    for value, read_value in zip(
        hillas_config_1.as_dict().values(), hillas_1.as_dict().values()
    ):
        np.testing.assert_equal(value, read_value)

    for value, read_value in zip(
        hillas_config_2.as_dict().values(), hillas_2.as_dict().values()
    ):
        np.testing.assert_equal(value, read_value)
Code example #10
def test_fixed_point_column_transform(tmp_path):
    """ ensure a user-added column transform is applied """
    from ctapipe.io.tableio import FixedPointColumnTransform

    tmp_file = tmp_path / "test_column_transforms.hdf5"

    class SomeContainer(Container):
        container_prefix = ""
        image = Field(np.array([np.nan, np.inf, -np.inf]))

    cont = SomeContainer()

    with HDF5TableWriter(tmp_file, group_name="data") as writer:
        writer.add_column_transform(
            "signed", "image",
            FixedPointColumnTransform(100, 0, np.float64, np.int32))
        writer.add_column_transform(
            "unsigned",
            "image",
            FixedPointColumnTransform(100, 0, np.float64, np.uint32),
        )
        # write the same container to both tables; the transforms above apply to the "image" column
        writer.write("signed", cont)
        writer.write("unsigned", cont)

    with HDF5TableReader(tmp_file, mode="r") as reader:
        signed = next(reader.read("/data/signed", SomeContainer()))
        unsigned = next(reader.read("/data/unsigned", SomeContainer()))

        for data in (signed, unsigned):
            # check we get our original nans back
            assert np.isnan(data.image[0])
            assert np.isposinf(data.image[1])
            assert np.isneginf(data.image[2])
Code example #11
def test_column_transforms_regexps(tmp_path):
    """ ensure a user-added column transform is applied when given as a regexp"""

    tmp_file = tmp_path / "test_column_transforms.hdf5"

    def multiply_by_10(x):
        return x * 10

    class SomeContainer(Container):
        container_prefix = ""
        hillas_x = Field(1)
        hillas_y = Field(1)

    cont = SomeContainer()

    with HDF5TableWriter(tmp_file, group_name="data") as writer:
        writer.add_column_transform_regexp("my.*", "hillas_.*", multiply_by_10)
        writer.add_column_transform_regexp("anothertable", "hillas_y",
                                           multiply_by_10)

        writer.write("mytable", cont)
        writer.write("anothertable", cont)

    # check that we get back the transformed values (note here a round trip will
    # not work, as there is no inverse transform in this test)
    with HDF5TableReader(tmp_file, mode="r") as reader:
        data = next(reader.read("/data/mytable", SomeContainer()))
        assert data.hillas_x == 10
        assert data.hillas_y == 10

        data = next(reader.read("/data/anothertable", SomeContainer()))
        assert data.hillas_x == 1
        assert data.hillas_y == 10
Code example #12
def test_column_transforms(tmp_path):
    """ ensure a user-added column transform is applied """
    from ctapipe.containers import NAN_TIME
    from ctapipe.io.tableio import FixedPointColumnTransform

    tmp_file = tmp_path / "test_column_transforms.hdf5"

    class SomeContainer(Container):
        container_prefix = ""

        current = Field(1 * u.A, unit=u.uA)
        time = Field(NAN_TIME)
        image = Field(np.array([1.234, 123.456]))

    cont = SomeContainer()

    with HDF5TableWriter(tmp_file, group_name="data") as writer:
        writer.add_column_transform(
            "mytable", "image",
            FixedPointColumnTransform(100, 0, np.float64, np.int32))
        # the fixed-point transform above applies to the "image" column
        writer.write("mytable", cont)

    # check that unit conversion, the time column, and the fixed-point rounding survive the round trip
    with HDF5TableReader(tmp_file, mode="r") as reader:
        data = next(reader.read("/data/mytable", SomeContainer()))
        assert data.current.value == 1e6
        assert data.current.unit == u.uA
        assert isinstance(data.time, Time)
        assert data.time == NAN_TIME
        # rounded to two digits
        assert np.all(data.image == np.array([1.23, 123.45]))
Code example #13
def test_read_container(temp_h5_file):
    r0tel1 = R0CameraContainer()
    r0tel2 = R0CameraContainer()
    mc = MCEventContainer()

    reader = HDF5TableReader(str(temp_h5_file))

    # get the generators for each table
    mctab = reader.read('/R0/MC', mc)
    r0tab1 = reader.read('/R0/tel_001', r0tel1)
    r0tab2 = reader.read('/R0/tel_002', r0tel2)

    # read all 3 tables in sync
    for ii in range(3):

        m = next(mctab)
        r0_1 = next(r0tab1)
        r0_2 = next(r0tab2)

        print("MC:", m)
        print("t0:", r0_1.image)
        print("t1:", r0_2.image)
        print("---------------------------")

    assert 'test_attribute' in r0_1.meta
    assert r0_1.meta['date'] == "2020-10-10"

    reader.close()
Code example #14
def test_reader_closes_file(temp_h5_file):

    with HDF5TableReader(str(temp_h5_file)) as h5_table:

        assert h5_table._h5file.isopen == True

    assert h5_table._h5file.isopen == False
Code example #15
def test_reader_closes_file(temp_h5_file):

    with HDF5TableReader(temp_h5_file) as h5_table:

        assert h5_table._h5file.isopen == 1

    assert h5_table._h5file.isopen == 0
Code example #16
def test_read_container(temp_h5_file):
    r0tel1 = R0CameraContainer()
    r0tel2 = R0CameraContainer()
    sim_shower = SimulatedShowerContainer()

    with HDF5TableReader(temp_h5_file) as reader:

        # get the generators for each table
        # test supplying a single container as well as an
        # iterable with one entry only
        simtab = reader.read("/R0/sim_shower", (sim_shower, ))
        r0tab1 = reader.read("/R0/tel_001", r0tel1)
        r0tab2 = reader.read("/R0/tel_002", r0tel2)

        # read all 3 tables in sync
        for ii in range(3):

            m = next(simtab)[0]
            r0_1 = next(r0tab1)
            r0_2 = next(r0tab2)

            print("sim_shower:", m)
            print("t0:", r0_1.waveform)
            print("t1:", r0_2.waveform)
            print("---------------------------")

        assert "test_attribute" in r0_1.meta
        assert r0_1.meta["date"] == "2020-10-10"
Code example #17
File: calibrator.py (project: adonini/cta-lstchain)
    def _initialize_correction(self):
        """
        Read the correction from hdf5 calibration file
        """

        self.mon_data.tels_with_data = self.allowed_tels
        self.log.info(f"read {self.calibration_path}")

        try:
            with HDF5TableReader(self.calibration_path) as h5_table:
                assert h5_table._h5file.isopen == True
                for telid in self.allowed_tels:
                    # read the calibration data for the moment only one event
                    table = '/tel_' + str(telid) + '/calibration'
                    next(
                        h5_table.read(table,
                                      self.mon_data.tel[telid].calibration))
                    # eliminate inf values (should be done probably before)
                    dc_to_pe = self.mon_data.tel[telid].calibration.dc_to_pe

                    dc_to_pe[np.isinf(dc_to_pe)] = 0
                    self.log.info(
                        f"read {self.mon_data.tel[telid].calibration.dc_to_pe}"
                    )
        except Exception:
            self.log.error(
                f"Problem in reading calibration file {self.calibration_path}")
Code example #18
def test_read_whole_table(temp_h5_file):

    sim_shower = SimulatedShowerContainer()

    with HDF5TableReader(temp_h5_file) as reader:

        for cont in reader.read("/R0/sim_shower", sim_shower):
            print(cont)
Code example #19
File: test_hdf5.py (project: ssteinmassl/ctapipe)
def test_read_whole_table(temp_h5_file):

    mc = MCEventContainer()

    with HDF5TableReader(temp_h5_file) as reader:

        for cont in reader.read("/R0/MC", mc):
            print(cont)
Code example #20
def test_read_whole_table(temp_h5_file):

    mc = MCEventContainer()

    reader = HDF5TableReader(str(temp_h5_file))

    for cont in reader.read('/R0/MC', mc):
        print(cont)

    reader.close()
Code example #21
def test_with_context_reader(test_h5_file):

    with HDF5TableReader(test_h5_file) as h5_table:

        assert h5_table._h5file.isopen == 1

        for cont in h5_table.read("/R0/sim_shower", SimulatedShowerContainer):
            print(cont)

    assert h5_table._h5file.isopen == 0
Code example #22
def test_write_to_any_location(tmp_path):
    path = tmp_path / "test.h5"
    loc = "path/path_1"

    class ContainerA(Container):
        a = Field(0, "some integer field")

    container = ContainerA(a=1)

    with HDF5TableWriter(path, group_name=loc + "/group_1") as h5:
        for _ in range(5):
            h5.write("table", container)
            h5.write("deeper/table2", container)

    with HDF5TableReader(path) as h5:
        for container in h5.read(f"/{loc}/group_1/table", ContainerA):
            assert container.a == 1

    with HDF5TableReader(path) as h5:
        for container in h5.read(f"/{loc}/group_1/deeper/table2", ContainerA):
            assert container.a == 1
Code example #23
File: test_hdf5.py (project: ssteinmassl/ctapipe)
def test_with_context_reader(temp_h5_file):

    mc = MCEventContainer()

    with HDF5TableReader(temp_h5_file) as h5_table:

        assert h5_table._h5file.isopen == 1

        for cont in h5_table.read("/R0/MC", mc):
            print(cont)

    assert h5_table._h5file.isopen == 0
Code example #24
def test_with_context_reader(temp_h5_file):

    mc = MCEventContainer()

    with HDF5TableReader(str(temp_h5_file)) as h5_table:

        assert h5_table._h5file.isopen == True

        for cont in h5_table.read('/R0/MC', mc):
            print(cont)

    assert h5_table._h5file.isopen == False
Code example #25
def test_prefix(tmp_path):
    tmp_file = tmp_path / "test_prefix.hdf5"
    hillas_parameter_container = HillasParametersContainer(x=1 * u.m,
                                                           y=1 * u.m,
                                                           length=1 * u.m,
                                                           width=1 * u.m)

    leakage_container = LeakageContainer(
        pixels_width_1=0.1,
        pixels_width_2=0.1,
        intensity_width_1=0.1,
        intensity_width_2=0.1,
    )

    with HDF5TableWriter(tmp_file, group_name="blabla",
                         add_prefix=True) as writer:
        writer.write("events", [hillas_parameter_container, leakage_container])

    df = pd.read_hdf(tmp_file, key="/blabla/events")
    assert "hillas_x" in df.columns
    assert "leakage_pixels_width_1" in df.columns

    with HDF5TableReader(tmp_file) as reader:
        generator = reader.read("/blabla/events",
                                HillasParametersContainer(),
                                prefix=True)
        hillas = next(generator)
    for value, read_value in zip(hillas_parameter_container.as_dict().values(),
                                 hillas.as_dict().values()):
        np.testing.assert_equal(value, read_value)

    with HDF5TableReader(tmp_file) as reader:
        generator = reader.read("/blabla/events",
                                LeakageContainer(),
                                prefix=True)
        leakage = next(generator)
    for value, read_value in zip(leakage_container.as_dict().values(),
                                 leakage.as_dict().values()):
        np.testing.assert_equal(value, read_value)
Code example #26
def test_strings(tmp_path):
    """Test we can write unicode strings"""
    from ctapipe.core import Container
    from ctapipe.io import read_table

    # when not giving a max_len, should be taken from the first container
    class Container1(Container):
        container_prefix = ""
        string = Field("", "test string")

    path = tmp_path / "test.h5"

    strings = ["Hello", "öäα"]

    with HDF5TableWriter(path, mode="w") as writer:
        for string in strings:
            writer.write("strings", Container1(string=string))

    table = read_table(path, "/strings")

    # the α is above the max length estimated from the first element
    assert table["string"].tolist() == ["Hello", "öä"]

    class Container2(Container):
        container_prefix = ""
        string = Field("", "test string", max_length=10)

    path = tmp_path / "test.h5"

    strings = ["Hello", "öäα", "12345678910"]
    expected = ["Hello", "öäα", "1234567891"]

    with HDF5TableWriter(path, mode="w") as writer:
        for string in strings:
            writer.write("strings", Container2(string=string))

    table = read_table(path, "/strings")

    # with max_length=10, the last string is truncated to 10 characters
    assert table["string"].tolist() == expected

    # test this also works with table reader
    with HDF5TableReader(path) as reader:
        generator = reader.read("/strings", Container2)
        for string in expected:
            c = next(generator)
            assert c.string == string
Code example #27
def test_write_large_integer():
    class C(Container):
        value = Field(True, "Integer value")

    exps = [15, 31, 63]
    with tempfile.NamedTemporaryFile() as f:
        with HDF5TableWriter(f.name, "test") as writer:
            for exp in exps:
                c = C(value=2**exp - 1)
                writer.write("c", c)

        c = C()
        with HDF5TableReader(f.name) as reader:
            c_reader = reader.read("/test/c", c)
            for exp in exps:
                cur = next(c_reader)
                assert cur.value == 2**exp - 1
Code example #28
def test_write_bool():
    class C(Container):
        boolean = Field(True, "Boolean value")

    with tempfile.NamedTemporaryFile() as f:
        with HDF5TableWriter(f.name, "test") as writer:
            for i in range(2):
                c = C(boolean=(i % 2 == 0))
                writer.write("c", c)

        c = C()
        with HDF5TableReader(f.name) as reader:
            c_reader = reader.read("/test/c", c)
            for i in range(2):
                cur = next(c_reader)
                expected = (i % 2) == 0
                assert isinstance(cur.boolean, np.bool_)
                assert cur.boolean == expected
Code example #29
def test_write_large_integer(tmp_path):
    path = tmp_path / "test.h5"

    class C(Container):
        value = Field(True, "Integer value")

    exps = [15, 31, 63]
    with HDF5TableWriter(path, "test") as writer:
        for exp in exps:
            c = C(value=2**exp - 1)
            writer.write("c", c)

    c = C()
    with HDF5TableReader(path) as reader:
        c_reader = reader.read("/test/c", c)
        for exp in exps:
            cur = next(c_reader)
            assert cur.value == 2**exp - 1
Code example #30
def test_read_write_container_with_int_enum(tmp_path):
    tmp_file = tmp_path / "container_with_int_enum.hdf5"

    def create_stream(n_event):
        data = WithIntEnum()
        for i in range(n_event):
            data.event_type = data.EventType(i % 3 + 1)
            yield data

    with HDF5TableWriter(tmp_file, group_name="data") as h5_table:
        for data in create_stream(10):
            h5_table.write("table", data)

    with HDF5TableReader(tmp_file, mode="r") as h5_table:
        for group_name in ["data/"]:
            group_name = "/{}table".format(group_name)
            for data in h5_table.read(group_name, WithIntEnum()):
                assert isinstance(data.event_type, WithIntEnum.EventType)
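
The WithIntEnum container used in the last example is defined elsewhere in the test module. A minimal, hypothetical definition consistent with how the snippet uses it (an event_type field holding an IntEnum whose members have values 1 to 3; the member names below are placeholders):

import enum

from ctapipe.core import Container, Field


class WithIntEnum(Container):
    """Hypothetical container with an IntEnum-typed field, matching the usage above."""

    class EventType(enum.IntEnum):
        type_1 = 1
        type_2 = 2
        type_3 = 3

    event_type = Field(EventType.type_1, "event type")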