Example #1
    def test_hdf5_reading(self) -> None:

        data = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        with h5py.File(self.test_dir + 'data/PynPoint_database.hdf5',
                       'a') as hdf_file:
            hdf_file.create_dataset('extra', data=data)
            hdf_file.create_dataset('header_extra/PARANG',
                                    data=[1., 2., 3., 4.])

        read = Hdf5ReadingModule(name_in='read1',
                                 input_filename='PynPoint_database.hdf5',
                                 input_dir=self.test_dir + 'data',
                                 tag_dictionary={'images': 'images'})

        self.pipeline.add_module(read)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 75, 25],
                           6.921353838812206e-05,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           1.0506056979365338e-06,
                           rtol=limit,
                           atol=0.)
        assert data.shape == (10, 100, 100)
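
These methods come from the PynPoint test suite and rely on class-level fixtures (self.test_dir, self.pipeline, self.limit) and a module-level limit constant that the snippets do not show. Below is a minimal sketch of the scaffolding they assume; the import paths, tolerance values, and directory layout are illustrative assumptions rather than the original fixtures.

# Minimal sketch of the assumed scaffolding (hypothetical values; the exact
# import locations can differ between PynPoint versions).
import os

import h5py
import numpy as np
import pytest

from pynpoint import Hdf5ReadingModule, Pypeline

limit = 1e-10  # relative tolerance used by the np.allclose assertions


class TestHdf5Reading:

    def setup_class(self) -> None:
        # Folder that contains a 'data/' subfolder with the HDF5 files
        # read and written by the test methods above.
        self.test_dir = os.path.dirname(__file__) + '/'
        self.limit = 1e-10
        self.pipeline = Pypeline(working_place_in=self.test_dir,
                                 input_place_in=self.test_dir,
                                 output_place_in=self.test_dir)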
Example #2
    def test_hdf5_reading(self):

        data = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        with h5py.File(self.test_dir + "data/PynPoint_database.hdf5",
                       "a") as h5f:
            h5f.create_dataset("extra", data=data)
            h5f.create_dataset("header_extra/PARANG", data=[1., 2., 3., 4.])

        read = Hdf5ReadingModule(name_in="read1",
                                 input_filename="PynPoint_database.hdf5",
                                 input_dir=self.test_dir + "data",
                                 tag_dictionary={"images": "images"})

        self.pipeline.add_module(read)
        self.pipeline.run_module("read1")

        data = self.pipeline.get_data("images")
        assert np.allclose(data[0, 75, 25],
                           6.921353838812206e-05,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           1.0506056979365338e-06,
                           rtol=limit,
                           atol=0.)
        assert data.shape == (10, 100, 100)
Example #3
    def test_hdf5_reading(self):

        read = Hdf5ReadingModule(name_in='read',
                                 input_filename='test.hdf5',
                                 input_dir=self.test_dir,
                                 tag_dictionary={
                                     'data1': 'data1',
                                     'data2': 'data2'
                                 })

        self.pipeline.add_module(read)
        self.pipeline.run_module('read')

        data1 = self.pipeline.get_data('data1')
        data2 = self.pipeline.get_data('data2')
        data3 = self.pipeline.get_data('images')
        assert np.allclose(data1, data2, rtol=limit, atol=0.)
        assert np.allclose(data2, data3, rtol=limit, atol=0.)

        attribute1 = self.pipeline.get_attribute('images',
                                                 'PARANG',
                                                 static=False)
        attribute2 = self.pipeline.get_attribute('data1',
                                                 'PARANG',
                                                 static=False)
        attribute3 = self.pipeline.get_attribute('data2',
                                                 'PARANG',
                                                 static=False)
        assert np.allclose(attribute1, attribute2, rtol=limit, atol=0.)
        assert np.allclose(attribute2, attribute3, rtol=limit, atol=0.)
Example #4
    def test_wrong_tag(self):

        read = Hdf5ReadingModule(name_in="read3",
                                 input_filename="PynPoint_database.hdf5",
                                 input_dir=self.test_dir+"data",
                                 tag_dictionary={"test":"test"})

        self.pipeline.add_module(read)
        self.pipeline.run_module("read3")

        h5f = h5py.File(self.test_dir+"data/PynPoint_database.hdf5", "r")
        assert set(h5f.keys()) == set(["extra", "header_extra", "header_images", "images"])
        h5f.close()
Example #5
    def test_no_input_filename(self):

        read = Hdf5ReadingModule(name_in="read4",
                                 input_filename=None,
                                 input_dir=self.test_dir+"data",
                                 tag_dictionary=None)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read4")

        data = self.pipeline.get_data("images")
        assert np.allclose(data[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)
Example #6
    def test_dictionary_none(self):

        read = Hdf5ReadingModule(name_in='read2',
                                 input_filename='PynPoint_database.hdf5',
                                 input_dir=self.test_dir + 'data',
                                 tag_dictionary=None)

        self.pipeline.add_module(read)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)
Example #7
    def test_no_input_filename(self) -> None:

        module = Hdf5ReadingModule(name_in='read4',
                                   input_filename=None,
                                   input_dir=self.test_dir + 'data',
                                   tag_dictionary=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read4')

        data = self.pipeline.get_data('images')
        assert np.sum(data) == pytest.approx(0.007153603490533874,
                                             rel=self.limit,
                                             abs=0.)
        assert data.shape == (5, 11, 11)
Example #8
    def test_wrong_tag(self):

        read = Hdf5ReadingModule(name_in='read3',
                                 input_filename='PynPoint_database.hdf5',
                                 input_dir=self.test_dir + 'data',
                                 tag_dictionary={'test': 'test'})

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read3')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The dataset with tag name \'test\' is not found in ' \
                                             'the HDF5 file.'

        with h5py.File(self.test_dir + 'data/PynPoint_database.hdf5', 'r') as hdf_file:
            assert set(hdf_file.keys()) == {'extra', 'header_extra', 'header_images', 'images'}
Example #9
    def test_wrong_tag(self):

        read = Hdf5ReadingModule(name_in="read3",
                                 input_filename="PynPoint_database.hdf5",
                                 input_dir=self.test_dir + "data",
                                 tag_dictionary={"test": "test"})

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read3")

        assert len(warning) == 1
        assert warning[0].message.args[0] == "The dataset with tag name 'test' is not found in " \
                                             "the HDF5 file."

        h5f = h5py.File(self.test_dir + "data/PynPoint_database.hdf5", "r")
        assert set(h5f.keys()) == set(
            ["extra", "header_extra", "header_images", "images"])
        h5f.close()
Example #10
    def test_hdf5_reading(self):

        read = Hdf5ReadingModule(name_in="read",
                                 input_filename="test.hdf5",
                                 input_dir=self.test_dir,
                                 tag_dictionary={"data1":"data1", "data2":"data2"})

        self.pipeline.add_module(read)
        self.pipeline.run_module("read")

        data1 = self.pipeline.get_data("data1")
        data2 = self.pipeline.get_data("data2")
        data3 = self.pipeline.get_data("images")
        assert np.allclose(data1, data2, rtol=limit, atol=0.)
        assert np.allclose(data2, data3, rtol=limit, atol=0.)

        attribute1 = self.pipeline.get_attribute("images", "PARANG", static=False)
        attribute2 = self.pipeline.get_attribute("data1", "PARANG", static=False)
        attribute3 = self.pipeline.get_attribute("data2", "PARANG", static=False)
        assert np.allclose(attribute1, attribute2, rtol=limit, atol=0.)
        assert np.allclose(attribute2, attribute3, rtol=limit, atol=0.)
Example #11
    def test_hdf5_reading(self) -> None:

        data = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        with h5py.File(self.test_dir + 'data/PynPoint_database.hdf5',
                       'a') as hdf_file:
            hdf_file.create_dataset('extra', data=data)
            hdf_file.create_dataset('header_extra/PARANG',
                                    data=[1., 2., 3., 4.])

        module = Hdf5ReadingModule(name_in='read1',
                                   input_filename='PynPoint_database.hdf5',
                                   input_dir=self.test_dir + 'data',
                                   tag_dictionary={'images': 'images'})

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('images')
        assert np.sum(data) == pytest.approx(0.007153603490533874,
                                             rel=self.limit,
                                             abs=0.)
        assert data.shape == (5, 11, 11)