def test_remove_module(self):
    """Modules can be removed by name; an unknown name raises a UserWarning."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in='read')
    pipeline.add_module(reader)

    bad_pixel = BadPixelSigmaFilterModule(name_in='badpixel',
                                          image_in_tag='im_arr1',
                                          image_out_tag='im_out')
    pipeline.add_module(bad_pixel)

    assert pipeline.get_module_names() == ['read', 'badpixel']
    assert pipeline.remove_module('read')
    assert pipeline.get_module_names() == ['badpixel']
    assert pipeline.remove_module('badpixel')

    # Removing a name that is not registered should only warn, not raise.
    with pytest.warns(UserWarning) as warning:
        pipeline.remove_module('test')

    assert len(warning) == 1
    assert warning[0].message.args[0] == ('Module name \'test\' not found in the Pypeline '
                                          'dictionary.')

    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_remove_module(self) -> None:
    """Remove pipeline modules by name and warn when the name is unknown."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    module = FitsReadingModule(name_in='read')
    pipeline.add_module(module)

    module = BadPixelSigmaFilterModule(name_in='badpixel',
                                       image_in_tag='im_arr1',
                                       image_out_tag='im_out')
    pipeline.add_module(module)

    assert pipeline.get_module_names() == ['read', 'badpixel']
    assert pipeline.remove_module('read')
    assert pipeline.get_module_names() == ['badpixel']
    assert pipeline.remove_module('badpixel')

    with pytest.warns(UserWarning) as warning:
        pipeline.remove_module('test')

    assert len(warning) == 1

    # BUG FIX: the original had a stray trailing '\' line continuation after this
    # string, which glued the os.remove() call onto the assert statement and made
    # the method a SyntaxError. The continuation has been removed so the cleanup
    # call is a separate statement.
    assert warning[0].message.args[0] == 'Pipeline module \'test\' is not found in the ' \
                                         'Pypeline dictionary so it could not be removed. ' \
                                         'The dictionary contains the following modules: [].'

    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_get_shape(self):
    """The shape of an ingested dataset is queryable through the pipeline."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in="read", image_tag="images")
    pipeline.add_module(reader)
    pipeline.run_module("read")

    assert pipeline.get_shape("images") == (10, 100, 100)
def test_get_shape(self):
    """The shape of an ingested dataset is queryable through the pipeline."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in='read', image_tag='images')
    pipeline.add_module(reader)
    pipeline.run_module('read')

    assert pipeline.get_shape('images') == (10, 100, 100)
class TestFilter:
    """Tests for the image filter pipeline modules.

    The test methods run in order and share one Pypeline instance: data read by
    ``test_read_data`` is consumed by ``test_gaussian_filter``.
    """

    def setup_class(self) -> None:
        # Create synthetic star data and a config file next to this test file.
        self.test_dir = os.path.dirname(__file__) + '/'
        create_star_data(self.test_dir + 'data')
        create_config(self.test_dir + 'PynPoint_config.ini')
        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        # Remove the generated database, config, and data folder.
        remove_test_data(self.test_dir, folders=['data'])

    def test_read_data(self) -> None:
        """Read the synthetic FITS images into the 'data' tag and check statistics."""
        module = FitsReadingModule(name_in='read',
                                   image_tag='data',
                                   input_dir=self.test_dir + 'data',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('data')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_gaussian_filter(self) -> None:
        """Apply a Gaussian smoothing filter and check the filtered statistics."""
        module = GaussianFilterModule(name_in='filter',
                                      image_in_tag='data',
                                      image_out_tag='filtered',
                                      fwhm=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('filter')

        data = self.pipeline.get_data('filtered')
        assert np.allclose(data[0, 50, 50], 0.0388143943049942, rtol=limit, atol=0.)
        # Smoothing preserves the mean flux (up to floating-point error).
        assert np.allclose(np.mean(data), 0.00010029494781738068, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)
def test_add_wrong_module(self):
    """An object that is not a Pypeline module is rejected with an AssertionError."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    with pytest.raises(AssertionError) as error:
        pipeline.add_module(None)

    assert str(error.value) == 'The added module is not a valid Pypeline module.'

    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_add_wrong_module(self) -> None:
    """Adding ``None`` raises a TypeError from the runtime type check."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    with pytest.raises(TypeError) as error:
        pipeline.add_module(None)

    assert str(error.value) == ('type of argument "module" must be '
                                'pynpoint.core.processing.PypelineModule; got NoneType instead')

    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_get_shape(self) -> None:
    """The shape of an ingested dataset is queryable through the pipeline."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in='read', image_tag='images')
    pipeline.add_module(reader)
    pipeline.run_module('read')

    assert pipeline.get_shape('images') == (5, 11, 11)
class TestFilter:
    """Tests for the image filter pipeline modules.

    The test methods run in order and share one Pypeline instance: data read by
    ``test_read_data`` is consumed by ``test_gaussian_filter``.
    """

    def setup_class(self) -> None:
        # Relative tolerance for the pytest.approx comparisons below.
        self.limit = 1e-10
        # Create synthetic star data and a config file next to this test file.
        self.test_dir = os.path.dirname(__file__) + '/'
        create_star_data(self.test_dir + 'data')
        create_config(self.test_dir + 'PynPoint_config.ini')
        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        # Remove the generated database, config, and data folder.
        remove_test_data(self.test_dir, folders=['data'])

    def test_read_data(self) -> None:
        """Read the synthetic FITS images into the 'data' tag and check the total flux."""
        module = FitsReadingModule(name_in='read',
                                   image_tag='data',
                                   input_dir=self.test_dir + 'data',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('data')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_gaussian_filter(self) -> None:
        """Apply a Gaussian smoothing filter; the total flux is preserved."""
        module = GaussianFilterModule(name_in='filter',
                                      image_in_tag='data',
                                      image_out_tag='filtered',
                                      fwhm=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('filter')

        data = self.pipeline.get_data('filtered')
        assert np.sum(data) == pytest.approx(105.54278879805275, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)
def test_run_module(self):
    """A single module can be added and executed by name."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in="read", image_tag="im_arr")

    assert pipeline.add_module(reader) is None
    assert pipeline.run_module("read") is None

    os.remove(self.test_dir + "PynPoint_database.hdf5")
def test_run_module(self):
    """A single module can be added and executed by name."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in='read', image_tag='im_arr')

    assert pipeline.add_module(reader) is None
    assert pipeline.run_module('read') is None

    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_add_module(self):
    """Readers, a processing module, and writers can be added; duplicates warn."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader_one = FitsReadingModule(name_in='read1', input_dir=None, image_tag='im_arr1')
    assert pipeline.add_module(reader_one) is None

    reader_two = FitsReadingModule(name_in='read2', input_dir=self.test_dir, image_tag='im_arr2')
    assert pipeline.add_module(reader_two) is None

    # Adding a module whose name already exists overwrites it with a warning.
    with pytest.warns(UserWarning) as warning:
        pipeline.add_module(reader_two)

    assert len(warning) == 1
    assert warning[0].message.args[0] == ('Pipeline module names need to be unique. '
                                          'Overwriting module \'read2\'.')

    bad_pixel = BadPixelSigmaFilterModule(name_in='badpixel',
                                          image_in_tag='im_arr1',
                                          image_out_tag='im_out')
    assert pipeline.add_module(bad_pixel) is None

    writer_one = FitsWritingModule(name_in='write1', file_name='result.fits', data_tag='im_arr1')
    assert pipeline.add_module(writer_one) is None

    writer_two = FitsWritingModule(name_in='write2',
                                   file_name='result.fits',
                                   data_tag='im_arr1',
                                   output_dir=self.test_dir)
    assert pipeline.add_module(writer_two) is None

    assert pipeline.run() is None
    assert pipeline.get_module_names() == ['read1', 'read2', 'badpixel', 'write1', 'write2']

    os.remove(self.test_dir + 'result.fits')
    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_add_module(self) -> None:
    """Readers, a processing module, and writers can be added; duplicates warn."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader_one = FitsReadingModule(name_in='read1', input_dir=None, image_tag='im_arr1')
    assert pipeline.add_module(reader_one) is None

    reader_two = FitsReadingModule(name_in='read2', input_dir=self.test_dir, image_tag='im_arr2')
    assert pipeline.add_module(reader_two) is None

    # Adding a module whose name already exists overwrites it with a warning.
    with pytest.warns(UserWarning) as warning:
        pipeline.add_module(reader_two)

    assert len(warning) == 1
    assert warning[0].message.args[0] == ('Names of pipeline modules that are added to the '
                                          'Pypeline need to be unique. The current pipeline '
                                          'module, \'read2\', does already exist in the '
                                          'Pypeline dictionary so the previous module with '
                                          'the same name will be overwritten.')

    bad_pixel = BadPixelSigmaFilterModule(name_in='badpixel',
                                          image_in_tag='im_arr1',
                                          image_out_tag='im_out')
    assert pipeline.add_module(bad_pixel) is None

    writer_one = FitsWritingModule(name_in='write1', file_name='result.fits', data_tag='im_arr1')
    assert pipeline.add_module(writer_one) is None

    writer_two = FitsWritingModule(name_in='write2',
                                   file_name='result.fits',
                                   data_tag='im_arr1',
                                   output_dir=self.test_dir)
    assert pipeline.add_module(writer_two) is None

    assert pipeline.run() is None
    assert pipeline.get_module_names() == ['read1', 'read2', 'badpixel', 'write1', 'write2']

    os.remove(self.test_dir + 'result.fits')
    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_remove_module(self):
    """Modules can be removed by name; an unknown name raises a UserWarning."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in="read")
    pipeline.add_module(reader)

    bad_pixel = BadPixelSigmaFilterModule(name_in="badpixel")
    pipeline.add_module(bad_pixel)

    assert pipeline.get_module_names() == ["read", "badpixel"]
    assert pipeline.remove_module("read")
    assert pipeline.get_module_names() == ["badpixel"]
    assert pipeline.remove_module("badpixel")

    # Removing a name that is not registered should only warn, not raise.
    with pytest.warns(UserWarning) as warning:
        pipeline.remove_module("test")

    assert len(warning) == 1
    assert warning[0].message.args[0] == ("Module name 'test' not found in the Pypeline "
                                          "dictionary.")

    os.remove(self.test_dir + "PynPoint_database.hdf5")
def test_run_module_wrong_tag(self) -> None:
    """Running a module whose input tag has no data raises an AttributeError."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in='read')
    pipeline.add_module(reader)

    writer = FitsWritingModule(name_in='write', file_name='result.fits', data_tag='im_list')
    pipeline.add_module(writer)

    bad_pixel = BadPixelSigmaFilterModule(name_in='badpixel',
                                          image_in_tag='im_list',
                                          image_out_tag='im_out')
    pipeline.add_module(bad_pixel)

    # The processing module reads a tag that was never written.
    with pytest.raises(AttributeError) as error:
        pipeline.run_module('badpixel')

    assert str(error.value) == ('Pipeline module \'badpixel\' is looking for data under a '
                                'tag which does not exist in the database.')

    # The writer also reads a tag that was never written.
    with pytest.raises(AttributeError) as error:
        pipeline.run_module('write')

    assert str(error.value) == ('Pipeline module \'write\' is looking for data under a tag '
                                'which does not exist in the database.')

    # Validation of the whole pipeline reports the same missing tag.
    with pytest.raises(AttributeError) as error:
        pipeline.run()

    assert str(error.value) == ('Pipeline module \'write\' is looking for data under a tag '
                                'which is not created by a previous module or the data does '
                                'not exist in the database.')

    assert pipeline.validate_pipeline_module('test') == (False, 'test')

    with pytest.raises(TypeError) as error:
        pipeline._validate('module', 'tag')

    assert str(error.value) == ('type of argument "module" must be one of (ReadingModule, '
                                'WritingModule, ProcessingModule); got str instead')

    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_add_module(self):
    """Readers, a processing module, and writers can be added; duplicates warn."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader_one = FitsReadingModule(name_in="read1", input_dir=None, image_tag="im_arr1")
    assert pipeline.add_module(reader_one) is None

    reader_two = FitsReadingModule(name_in="read2", input_dir=self.test_dir, image_tag="im_arr2")
    assert pipeline.add_module(reader_two) is None

    # Adding a module whose name already exists overwrites it with a warning.
    with pytest.warns(UserWarning) as warning:
        pipeline.add_module(reader_two)

    assert len(warning) == 1
    assert warning[0].message.args[0] == ("Processing module names need to be unique. "
                                          "Overwriting module 'read2'.")

    bad_pixel = BadPixelSigmaFilterModule(name_in="badpixel", image_in_tag="im_arr1")
    assert pipeline.add_module(bad_pixel) is None

    writer_one = FitsWritingModule(name_in="write1", file_name="result.fits", data_tag="im_arr1")
    assert pipeline.add_module(writer_one) is None

    writer_two = FitsWritingModule(name_in="write2",
                                   file_name="result.fits",
                                   data_tag="im_arr1",
                                   output_dir=self.test_dir)
    assert pipeline.add_module(writer_two) is None

    assert pipeline.run() is None
    assert pipeline.get_module_names() == ['read1', 'read2', 'badpixel', 'write1', 'write2']

    os.remove(self.test_dir + "result.fits")
    os.remove(self.test_dir + "PynPoint_database.hdf5")
def test_run_module_wrong_tag(self):
    """Running a module whose input tag has no data raises an AttributeError."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in='read')
    pipeline.add_module(reader)

    writer = FitsWritingModule(name_in='write', file_name='result.fits', data_tag='im_list')
    pipeline.add_module(writer)

    bad_pixel = BadPixelSigmaFilterModule(name_in='badpixel',
                                          image_in_tag='im_list',
                                          image_out_tag='im_out')
    pipeline.add_module(bad_pixel)

    # The processing module reads a tag that was never written.
    with pytest.raises(AttributeError) as error:
        pipeline.run_module('badpixel')

    assert str(error.value) == ('Pipeline module \'badpixel\' is looking for data under a '
                                'tag which does not exist in the database.')

    # The writer also reads a tag that was never written.
    with pytest.raises(AttributeError) as error:
        pipeline.run_module('write')

    assert str(error.value) == ('Pipeline module \'write\' is looking for data under a tag '
                                'which does not exist in the database.')

    # Validation of the whole pipeline reports the same missing tag.
    with pytest.raises(AttributeError) as error:
        pipeline.run()

    assert str(error.value) == ('Pipeline module \'write\' is looking for data under a tag '
                                'which is not created by a previous module or does not exist '
                                'in the database.')

    assert pipeline.validate_pipeline_module('test') is None
    assert pipeline._validate('module', 'tag') == (False, None)

    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_run_module_wrong_tag(self):
    """Running a module whose input tag has no data raises an AttributeError."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    reader = FitsReadingModule(name_in="read")
    pipeline.add_module(reader)

    writer = FitsWritingModule(name_in="write", file_name="result.fits", data_tag="im_list")
    pipeline.add_module(writer)

    bad_pixel = BadPixelSigmaFilterModule(name_in="badpixel", image_in_tag="im_list")
    pipeline.add_module(bad_pixel)

    # The processing module reads a tag that was never written.
    with pytest.raises(AttributeError) as error:
        pipeline.run_module("badpixel")

    assert str(error.value) == ("Pipeline module 'badpixel' is looking for data under a tag "
                                "which does not exist in the database.")

    # The writer also reads a tag that was never written.
    with pytest.raises(AttributeError) as error:
        pipeline.run_module("write")

    assert str(error.value) == ("Pipeline module 'write' is looking for data under a tag "
                                "which does not exist in the database.")

    # Validation of the whole pipeline reports the same missing tag.
    with pytest.raises(AttributeError) as error:
        pipeline.run()

    assert str(error.value) == ("Pipeline module 'write' is looking for data under a tag "
                                "which is not created by a previous module or does not exist "
                                "in the database.")

    assert pipeline.validate_pipeline_module("test") is None
    assert pipeline._validate("module", "tag") == (False, None)

    os.remove(self.test_dir + "PynPoint_database.hdf5")
class TestBackground:
    """Tests for the background subtraction pipeline modules.

    The test methods run in order and share one Pypeline instance: later tests
    consume database tags that earlier tests create.
    """

    def setup_class(self):
        # Create four synthetic datasets (dithered, star, sky, line) plus a config file.
        self.test_dir = os.path.dirname(__file__) + '/'

        # Dithered observations: the star alternates between four subframe positions.
        create_fake(path=self.test_dir + 'dither',
                    ndit=[20, 20, 20, 20],
                    nframes=[20, 20, 20, 20],
                    exp_no=[1, 2, 3, 4],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[25, 75, 75, 25],
                    y0=[75, 75, 25, 25],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        # Centered star frames (odd exposure numbers, interleaved with sky frames).
        create_fake(path=self.test_dir + 'star',
                    ndit=[10, 10, 10, 10],
                    nframes=[10, 10, 10, 10],
                    exp_no=[1, 3, 5, 7],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        # Sky frames without a star (fwhm=None; even exposure numbers).
        create_fake(path=self.test_dir + 'sky',
                    ndit=[5, 5, 5, 5],
                    nframes=[5, 5, 5, 5],
                    exp_no=[2, 4, 6, 8],
                    npix=(100, 100),
                    fwhm=None,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        # Small frames for the line (row/column) subtraction tests.
        create_fake(path=self.test_dir + 'line',
                    ndit=[4, 4, 4, 4],
                    nframes=[4, 4, 4, 4],
                    exp_no=[1, 3, 5, 7],
                    npix=(20, 20),
                    fwhm=3.,
                    x0=[10, 10, 10, 10],
                    y0=[10, 10, 10, 10],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        # Remove the database, config, and all four data folders.
        remove_test_data(self.test_dir, folders=['dither', 'star', 'sky', 'line'])

    def test_read_data(self):
        """Read all four datasets into the database and check their statistics."""
        read = FitsReadingModule(name_in='read1',
                                 image_tag='dither',
                                 input_dir=self.test_dir + 'dither')

        self.pipeline.add_module(read)

        read = FitsReadingModule(name_in='read2',
                                 image_tag='star',
                                 input_dir=self.test_dir + 'star')

        self.pipeline.add_module(read)

        read = FitsReadingModule(name_in='read3',
                                 image_tag='sky',
                                 input_dir=self.test_dir + 'sky')

        self.pipeline.add_module(read)

        read = FitsReadingModule(name_in='read4',
                                 image_tag='line',
                                 input_dir=self.test_dir + 'line')

        self.pipeline.add_module(read)

        self.pipeline.run_module('read1')
        self.pipeline.run_module('read2')
        self.pipeline.run_module('read3')
        self.pipeline.run_module('read4')

        data = self.pipeline.get_data('dither')
        assert np.allclose(data[0, 74, 24], 0.05304008435511765, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010033896953157959, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('star')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        data = self.pipeline.get_data('sky')
        assert np.allclose(data[0, 50, 50], -7.613171257478652e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 8.937360237872607e-07, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('line')
        assert np.allclose(data[0, 10, 10], 0.09799496683489618, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.002502384977510189, rtol=limit, atol=0.)
        assert data.shape == (16, 20, 20)

    def test_simple_background(self):
        """Subtract the background using a frame shifted by a fixed offset."""
        simple = SimpleBackgroundSubtractionModule(shift=20,
                                                   name_in='simple',
                                                   image_in_tag='dither',
                                                   image_out_tag='simple')

        self.pipeline.add_module(simple)
        self.pipeline.run_module('simple')

        data = self.pipeline.get_data('simple')
        assert np.allclose(data[0, 74, 74], -0.05288064325101517, rtol=limit, atol=0.)
        # Pairwise subtraction should leave a mean of (numerically) zero.
        assert np.allclose(np.mean(data), 2.7755575615628916e-22, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

    def test_mean_background_shift(self):
        """Mean background subtraction with an explicit shift between positions."""
        mean = MeanBackgroundSubtractionModule(shift=20,
                                               cubes=1,
                                               name_in='mean2',
                                               image_in_tag='dither',
                                               image_out_tag='mean2')

        self.pipeline.add_module(mean)
        self.pipeline.run_module('mean2')

        data = self.pipeline.get_data('mean2')
        assert np.allclose(data[0, 74, 24], 0.0530465391626132, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.3970872216676808e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

    def test_mean_background_nframes(self):
        """Mean background subtraction with the shift derived from NFRAMES (shift=None)."""
        mean = MeanBackgroundSubtractionModule(shift=None,
                                               cubes=1,
                                               name_in='mean1',
                                               image_in_tag='dither',
                                               image_out_tag='mean1')

        self.pipeline.add_module(mean)
        self.pipeline.run_module('mean1')

        data = self.pipeline.get_data('mean1')
        # Results are identical to the explicit-shift case above.
        assert np.allclose(data[0, 74, 24], 0.0530465391626132, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.3970872216676808e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

    def test_dithering_attributes(self):
        """PCA dithering background subtraction with positions taken from attributes."""
        pca_dither = DitheringBackgroundModule(name_in='pca_dither1',
                                               image_in_tag='dither',
                                               image_out_tag='pca_dither1',
                                               center=None,
                                               cubes=None,
                                               size=0.8,
                                               gaussian=0.1,
                                               subframe=0.5,
                                               pca_number=5,
                                               mask_star=0.1,
                                               crop=True,
                                               prepare=True,
                                               pca_background=True,
                                               combine='pca')

        self.pipeline.add_module(pca_dither)
        self.pipeline.run_module('pca_dither1')

        # Check every intermediate product that the module writes for position 1.
        data = self.pipeline.get_data('dither_dither_crop1')
        assert np.allclose(data[0, 14, 14], 0.05304008435511765, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0002606205855710527, rtol=1e-6, atol=0.)
        assert data.shape == (80, 31, 31)

        data = self.pipeline.get_data('dither_dither_star1')
        assert np.allclose(data[0, 14, 14], 0.05304008435511765, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0010414302265833978, rtol=1e-6, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('dither_dither_mean1')
        assert np.allclose(data[0, 14, 14], 0.0530465391626132, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0010426228104479674, rtol=1e-6, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('dither_dither_background1')
        assert np.allclose(data[0, 14, 14], -0.00010629310882411674, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 3.5070523360436835e-07, rtol=1e-6, atol=0.)
        assert data.shape == (60, 31, 31)

        data = self.pipeline.get_data('dither_dither_pca_fit1')
        assert np.allclose(data[0, 14, 14], 1.5196412298279846e-05, rtol=1e-5, atol=0.)
        assert np.allclose(np.mean(data), 1.9779802529804516e-07, rtol=1e-4, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('dither_dither_pca_res1')
        assert np.allclose(data[0, 14, 14], 0.05302488794281937, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0010412324285580998, rtol=1e-6, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('dither_dither_pca_mask1')
        assert np.allclose(data[0, 14, 14], 0., rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.9531737773152965, rtol=1e-6, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('pca_dither1')
        assert np.allclose(data[0, 14, 14], 0.05302488794281937, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.001040627977720779, rtol=1e-6, atol=0.)
        assert data.shape == (80, 31, 31)

        data = self.pipeline.get_attribute('dither_dither_pca_res1', 'STAR_POSITION', static=False)
        assert np.allclose(data[0, 0], [15., 15.], rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 15., rtol=1e-6, atol=0.)
        assert data.shape == (20, 2)

    def test_dithering_center(self):
        """PCA dithering background subtraction with explicitly provided centers."""
        pca_dither = DitheringBackgroundModule(name_in='pca_dither2',
                                               image_in_tag='dither',
                                               image_out_tag='pca_dither2',
                                               center=((25., 75.),
                                                       (75., 75.),
                                                       (75., 25.),
                                                       (25., 25.)),
                                               cubes=1,
                                               size=0.8,
                                               gaussian=0.1,
                                               subframe=None,
                                               pca_number=5,
                                               mask_star=0.1,
                                               bad_pixel=None,
                                               crop=True,
                                               prepare=True,
                                               pca_background=True,
                                               combine='pca')

        self.pipeline.add_module(pca_dither)
        self.pipeline.run_module('pca_dither2')

        data = self.pipeline.get_data('pca_dither2')
        assert np.allclose(data[0, 14, 14], 0.05302488794328089, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0010406279782666378, rtol=1e-3, atol=0.)
        assert data.shape == (80, 31, 31)

    def test_nodding_background(self):
        """Nodding background subtraction with stacked sky frames."""
        # First collapse each sky cube to its mean image.
        mean = StackCubesModule(name_in='mean',
                                image_in_tag='sky',
                                image_out_tag='mean',
                                combine='mean')

        self.pipeline.add_module(mean)
        self.pipeline.run_module('mean')

        data = self.pipeline.get_data('mean')
        assert np.allclose(data[0, 50, 50], 1.270877476321969e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 8.937360237872607e-07, rtol=limit, atol=0.)
        assert data.shape == (4, 100, 100)

        attribute = self.pipeline.get_attribute('mean', 'INDEX', static=False)
        assert np.allclose(np.mean(attribute), 1.5, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        attribute = self.pipeline.get_attribute('mean', 'NFRAMES', static=False)
        assert np.allclose(np.mean(attribute), 1, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        # Then subtract the stacked sky from the star frames.
        nodding = NoddingBackgroundModule(name_in='nodding',
                                          sky_in_tag='mean',
                                          science_in_tag='star',
                                          image_out_tag='nodding',
                                          mode='both')

        self.pipeline.add_module(nodding)
        self.pipeline.run_module('nodding')

        data = self.pipeline.get_data('nodding')
        assert np.allclose(data[0, 50, 50], 0.09797142624717381, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.945087327935862e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_line_background_mean(self):
        """Row/column background subtraction using the mean of the masked pixels."""
        module = LineSubtractionModule(name_in='line1',
                                       image_in_tag='line',
                                       image_out_tag='line_mean',
                                       combine='mean',
                                       mask=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('line1')

        data = self.pipeline.get_data('line_mean')
        assert np.allclose(data[0, 10, 10], 0.09792388324443534, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0024245904637616735, rtol=limit, atol=0.)
        assert data.shape == (16, 20, 20)

    def test_line_background_median(self):
        """Row/column background subtraction using the median of the masked pixels."""
        module = LineSubtractionModule(name_in='line2',
                                       image_in_tag='line',
                                       image_out_tag='line_median',
                                       combine='median',
                                       mask=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('line2')

        data = self.pipeline.get_data('line_median')
        assert np.allclose(data[0, 10, 10], 0.09782789699611127, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0024723022374338196, rtol=limit, atol=0.)
        assert data.shape == (16, 20, 20)
class TestHdf5WritingModule:
    """Tests for writing and re-reading datasets with the HDF5 modules.

    The test methods run in order and share one Pypeline instance; the reading
    test consumes the 'test.hdf5' file written by the earlier tests.
    """

    def setup_class(self):
        # Create a random dataset and config file next to this test file.
        self.test_dir = os.path.dirname(__file__) + '/'
        create_random(self.test_dir)
        create_config(self.test_dir+'PynPoint_config.ini')
        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        # Remove the database, config, and the exported HDF5 file.
        remove_test_data(self.test_dir, files=['test.hdf5'])

    def test_hdf5_writing(self):
        """Export the 'images' tag to test.hdf5 under the name 'data1'."""
        write = Hdf5WritingModule(file_name='test.hdf5',
                                  name_in='write1',
                                  output_dir=None,
                                  tag_dictionary={'images':'data1'},
                                  keep_attributes=True,
                                  overwrite=True)

        self.pipeline.add_module(write)
        self.pipeline.run_module('write1')

    def test_no_data_tag(self):
        """Writing a tag that holds no data emits a UserWarning."""
        write = Hdf5WritingModule(file_name='test.hdf5',
                                  name_in='write2',
                                  output_dir=None,
                                  tag_dictionary={'empty':'empty'},
                                  keep_attributes=True,
                                  overwrite=False)

        self.pipeline.add_module(write)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('write2')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'No data under the tag which is linked by the ' \
                                             'InputPort.'

    def test_overwrite_false(self):
        """With overwrite=False the existing file is appended, not replaced."""
        write = Hdf5WritingModule(file_name='test.hdf5',
                                  name_in='write3',
                                  output_dir=None,
                                  tag_dictionary={'images':'data2'},
                                  keep_attributes=True,
                                  overwrite=False)

        self.pipeline.add_module(write)
        self.pipeline.run_module('write3')

    def test_dictionary_none(self):
        """A tag_dictionary of None is accepted and the module still runs."""
        write = Hdf5WritingModule(file_name='test.hdf5',
                                  name_in='write4',
                                  output_dir=None,
                                  tag_dictionary=None,
                                  keep_attributes=True,
                                  overwrite=False)

        self.pipeline.add_module(write)
        self.pipeline.run_module('write4')

    def test_hdf5_reading(self):
        """Read back both exported datasets and compare them with the original."""
        read = Hdf5ReadingModule(name_in='read',
                                 input_filename='test.hdf5',
                                 input_dir=self.test_dir,
                                 tag_dictionary={'data1':'data1', 'data2':'data2'})

        self.pipeline.add_module(read)
        self.pipeline.run_module('read')

        data1 = self.pipeline.get_data('data1')
        data2 = self.pipeline.get_data('data2')
        data3 = self.pipeline.get_data('images')
        assert np.allclose(data1, data2, rtol=limit, atol=0.)
        assert np.allclose(data2, data3, rtol=limit, atol=0.)

        # keep_attributes=True should have carried the PARANG attribute along.
        attribute1 = self.pipeline.get_attribute('images', 'PARANG', static=False)
        attribute2 = self.pipeline.get_attribute('data1', 'PARANG', static=False)
        attribute3 = self.pipeline.get_attribute('data2', 'PARANG', static=False)
        assert np.allclose(attribute1, attribute2, rtol=limit, atol=0.)
        assert np.allclose(attribute2, attribute3, rtol=limit, atol=0.)
class TestDetectionLimits(object):
    """Tests for the contrast curve (detection limit) module.

    The test methods run in order and share one Pypeline instance; the contrast
    curve test consumes the data and PARANG attribute created by earlier tests.
    """

    def setup_class(self):
        # Create synthetic star data and a config file next to this test file.
        self.test_dir = os.path.dirname(__file__) + "/"

        create_star_data(path=self.test_dir + "limits")
        create_config(self.test_dir + "PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        # Remove the database, config, and data folder.
        remove_test_data(self.test_dir, folders=["limits"])

    def test_read_data(self):
        """Read the synthetic FITS images and check their statistics."""
        read = FitsReadingModule(name_in="read",
                                 image_tag="read",
                                 input_dir=self.test_dir + "limits")

        self.pipeline.add_module(read)
        self.pipeline.run_module("read")

        data = self.pipeline.get_data("read")
        assert np.allclose(data[0, 10, 10], 0.00012958496246258364, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_angle_interpolation(self):
        """Interpolate the parallactic angles and attach them as PARANG."""
        angle = AngleInterpolationModule(name_in="angle",
                                         data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle")

        data = self.pipeline.get_attribute("read", "PARANG", static=False)
        assert data[5] == 2.7777777777777777
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_contrast_curve(self):
        """Compute the contrast curve in single- and multi-processing mode."""
        proc = ["single", "multi"]

        for item in proc:

            if item == "multi":
                # Switch the pipeline to 4 CPUs for the multiprocessing run.
                database = h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a')
                database['config'].attrs['CPU'] = 4

            contrast = ContrastCurveModule(name_in="contrast_" + item,
                                           image_in_tag="read",
                                           psf_in_tag="read",
                                           contrast_out_tag="limits_" + item,
                                           separation=(0.5, 0.6, 0.1),
                                           angle=(0., 360., 180.),
                                           threshold=("sigma", 5.),
                                           psf_scaling=1.,
                                           aperture=0.1,
                                           pca_number=15,
                                           cent_size=None,
                                           edge_size=None,
                                           extra_rot=0.)

            self.pipeline.add_module(contrast)
            self.pipeline.run_module("contrast_" + item)

            # Output columns: separation, magnitude limit, variance, FPF.
            data = self.pipeline.get_data("limits_" + item)
            assert np.allclose(data[0, 0], 5.00000000e-01, rtol=limit, atol=0.)
            assert np.allclose(data[0, 1], 2.3624384190310397, rtol=limit, atol=0.)
            assert np.allclose(data[0, 2], 0.05234065236317515, rtol=limit, atol=0.)
            assert np.allclose(data[0, 3], 0.00012147700290954244, rtol=limit, atol=0.)
            assert data.shape == (1, 4)
class TestHdf5WritingModule(object):
    """Tests for writing and re-reading datasets with the HDF5 modules.

    The test methods run in order and share one Pypeline instance; the reading
    test consumes the 'test.hdf5' file written by the earlier tests.
    """

    def setup_class(self):
        # Create a random dataset and config file next to this test file.
        self.test_dir = os.path.dirname(__file__) + "/"
        create_random(self.test_dir)
        create_config(self.test_dir+"PynPoint_config.ini")
        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        # Remove the database, config, and the exported HDF5 file.
        remove_test_data(self.test_dir, files=["test.hdf5"])

    def test_hdf5_writing(self):
        """Export the 'images' tag to test.hdf5 under the name 'data1'."""
        write = Hdf5WritingModule(file_name="test.hdf5",
                                  name_in="write1",
                                  output_dir=None,
                                  tag_dictionary={"images":"data1"},
                                  keep_attributes=True,
                                  overwrite=True)

        self.pipeline.add_module(write)
        self.pipeline.run_module("write1")

    def test_no_data_tag(self):
        """Writing a tag that holds no data emits a UserWarning."""
        write = Hdf5WritingModule(file_name="test.hdf5",
                                  name_in="write2",
                                  output_dir=None,
                                  tag_dictionary={"empty":"empty"},
                                  keep_attributes=True,
                                  overwrite=False)

        self.pipeline.add_module(write)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("write2")

        assert len(warning) == 1
        assert warning[0].message.args[0] == "No data under the tag which is linked by the " \
                                             "InputPort."

    def test_overwrite_false(self):
        """With overwrite=False the existing file is appended, not replaced."""
        write = Hdf5WritingModule(file_name="test.hdf5",
                                  name_in="write3",
                                  output_dir=None,
                                  tag_dictionary={"images":"data2"},
                                  keep_attributes=True,
                                  overwrite=False)

        self.pipeline.add_module(write)
        self.pipeline.run_module("write3")

    def test_dictionary_none(self):
        """A tag_dictionary of None is accepted and the module still runs."""
        write = Hdf5WritingModule(file_name="test.hdf5",
                                  name_in="write4",
                                  output_dir=None,
                                  tag_dictionary=None,
                                  keep_attributes=True,
                                  overwrite=False)

        self.pipeline.add_module(write)
        self.pipeline.run_module("write4")

    def test_hdf5_reading(self):
        """Read back both exported datasets and compare them with the original."""
        read = Hdf5ReadingModule(name_in="read",
                                 input_filename="test.hdf5",
                                 input_dir=self.test_dir,
                                 tag_dictionary={"data1":"data1", "data2":"data2"})

        self.pipeline.add_module(read)
        self.pipeline.run_module("read")

        data1 = self.pipeline.get_data("data1")
        data2 = self.pipeline.get_data("data2")
        data3 = self.pipeline.get_data("images")
        assert np.allclose(data1, data2, rtol=limit, atol=0.)
        assert np.allclose(data2, data3, rtol=limit, atol=0.)

        # keep_attributes=True should have carried the PARANG attribute along.
        attribute1 = self.pipeline.get_attribute("images", "PARANG", static=False)
        attribute2 = self.pipeline.get_attribute("data1", "PARANG", static=False)
        attribute3 = self.pipeline.get_attribute("data2", "PARANG", static=False)
        assert np.allclose(attribute1, attribute2, rtol=limit, atol=0.)
        assert np.allclose(attribute2, attribute3, rtol=limit, atol=0.)
class TestLimits:
    """Tests for ContrastCurveModule and MassLimitsModule.

    NOTE(review): methods share one Pypeline and run in definition order;
    the contrast-curve tests consume the data read by `test_read_data`.
    `test_mass_limits` downloads a model grid over the network.
    """

    def setup_class(self) -> None:
        # Relative tolerance used in all pytest.approx comparisons below.
        self.limit = 1e-10

        self.test_dir = os.path.dirname(__file__) + '/'

        # Synthetic star data in the (oddly named) 'self.limits' folder.
        create_star_data(self.test_dir+'self.limits', npix=21, pos_star=10.)
        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        # Also remove the downloaded atmosphere-model file from test_mass_limits.
        remove_test_data(path=self.test_dir,
                         folders=['self.limits'],
                         files=['model.AMES-Cond-2000.M-0.0.NaCo.Vega'])

    def test_read_data(self) -> None:
        """Read the FITS cubes and pin the regression checksum of the data."""
        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir+'self.limits')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_angle_interpolation(self) -> None:
        """Interpolate parallactic angles; the PARANG attribute must sum to 900."""
        module = AngleInterpolationModule(name_in='angle',
                                          data_tag='read')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        attr = self.pipeline.get_attribute('read', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(900., rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_contrast_curve(self) -> None:
        """Contrast curve with a sigma threshold, single- and multi-processing.

        Both CPU settings must reproduce the same regression values.
        """
        proc = ['single', 'multi']

        for item in proc:
            if item == 'multi':
                # Switch the pipeline database to 4 CPUs for the second pass.
                with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
                    hdf_file['config'].attrs['CPU'] = 4

            module = ContrastCurveModule(name_in='contrast_'+item,
                                         image_in_tag='read',
                                         psf_in_tag='read',
                                         contrast_out_tag='limits_'+item,
                                         separation=(0.2, 0.3, 0.2),
                                         angle=(0., 360., 180.),
                                         threshold=('sigma', 5.),
                                         psf_scaling=1.,
                                         aperture=0.05,
                                         pca_number=2,
                                         cent_size=None,
                                         edge_size=1.,
                                         extra_rot=0.)

            self.pipeline.add_module(module)
            self.pipeline.run_module('contrast_'+item)

            # Columns: separation, contrast, variance, threshold value.
            data = self.pipeline.get_data('limits_'+item)
            assert data[0, 0] == pytest.approx(0.2, rel=self.limit, abs=0.)
            assert data[0, 1] == pytest.approx(2.5223717329932676, rel=self.limit, abs=0.)
            assert data[0, 2] == pytest.approx(0.0006250749411563979, rel=self.limit, abs=0.)
            assert data[0, 3] == pytest.approx(0.00026866680137822624, rel=self.limit, abs=0.)
            assert data.shape == (1, 4)

    def test_contrast_curve_fpf(self) -> None:
        """Contrast curve with a false-positive-fraction threshold."""
        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            # Restore single-CPU processing after the previous test.
            hdf_file['config'].attrs['CPU'] = 1

        module = ContrastCurveModule(name_in='contrast_fpf',
                                     image_in_tag='read',
                                     psf_in_tag='read',
                                     contrast_out_tag='limits_fpf',
                                     separation=(0.2, 0.3, 0.2),
                                     angle=(0., 360., 180.),
                                     threshold=('fpf', 1e-6),
                                     psf_scaling=1.,
                                     aperture=0.05,
                                     pca_number=2,
                                     cent_size=None,
                                     edge_size=1.,
                                     extra_rot=0.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('contrast_fpf')

        data = self.pipeline.get_data('limits_fpf')
        assert data[0, 0] == pytest.approx(0.2, rel=self.limit, abs=0.)
        assert data[0, 1] == pytest.approx(1.797063014325614, rel=self.limit, abs=0.)
        assert data[0, 2] == pytest.approx(0.0006250749411564145, rel=self.limit, abs=0.)
        # The requested FPF is echoed back in the last column.
        assert data[0, 3] == pytest.approx(1e-06, rel=self.limit, abs=0.)
        assert data.shape == (1, 4)

    def test_mass_limits(self) -> None:
        """Convert a synthetic contrast curve into mass limits (AMES-Cond grid)."""
        separation = np.linspace(0.1, 1.0, 10)
        contrast = -2.5*np.log10(1e-4/separation)
        variance = 0.1*contrast

        # Columns: separation, contrast, variance, (unused, left at zero).
        limits = np.zeros((10, 4))
        limits[:, 0] = separation
        limits[:, 1] = contrast
        limits[:, 2] = variance

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['contrast_limits'] = limits

        # NOTE(review): requires network access to fetch the model grid.
        url = 'https://home.strw.leidenuniv.nl/~stolker/pynpoint/' \
              'model.AMES-Cond-2000.M-0.0.NaCo.Vega'

        filename = self.test_dir + 'model.AMES-Cond-2000.M-0.0.NaCo.Vega'

        urlretrieve(url, filename)

        module = MassLimitsModule(model_file=filename,
                                  star_prop={'magnitude': 10.,
                                             'distance': 100.,
                                             'age': 20.},
                                  name_in='mass',
                                  contrast_in_tag='contrast_limits',
                                  mass_out_tag='mass_limits',
                                  instr_filter='L\'')

        self.pipeline.add_module(module)
        self.pipeline.run_module('mass')

        data = self.pipeline.get_data('mass_limits')
        assert np.mean(data[:, 0]) == pytest.approx(0.55, rel=self.limit, abs=0.)
        assert np.mean(data[:, 1]) == pytest.approx(0.001891690765603738, rel=self.limit, abs=0.)
        assert np.mean(data[:, 2]) == pytest.approx(0.000964309686441908, rel=self.limit, abs=0.)
        assert np.mean(data[:, 3]) == pytest.approx(-0.000696402843279597, rel=self.limit, abs=0.)
        assert data.shape == (10, 4)
class TestAttributeReading:
    """Tests for ParangReadingModule, AttributeReadingModule, and
    WavelengthReadingModule.

    NOTE(review): the methods share one Pypeline and run in definition
    order; the overwrite/present/same tests depend on attributes written
    by earlier tests.
    """

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + '/'

        # Random images without parallactic angles so the reading modules
        # below are the first to attach PARANG/EXP_NO/WAVELENGTH attributes.
        create_random(self.test_dir, ndit=10, parang=None)
        create_config(self.test_dir + 'PynPoint_config.ini')

        # 1D attribute files plus one deliberately-invalid 2D file.
        np.savetxt(self.test_dir + 'parang.dat', np.arange(1., 11., 1.))
        np.savetxt(self.test_dir + 'new.dat', np.arange(10., 21., 1.))
        np.savetxt(self.test_dir + 'attribute.dat', np.arange(1, 11, 1), fmt='%i')
        np.savetxt(self.test_dir + 'wavelength.dat', np.arange(1, 11, 1))

        data2d = np.random.normal(loc=0, scale=2e-4, size=(10, 10))
        np.savetxt(self.test_dir + 'data_2d.dat', data2d)

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, files=[
            'parang.dat', 'new.dat', 'attribute.dat', 'data_2d.dat', 'wavelength.dat'
        ])

    def test_input_data(self) -> None:
        """Pin the regression checksum of the random input images."""
        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

    def test_parang_reading(self) -> None:
        """Read parallactic angles from parang.dat into the PARANG attribute."""
        module = ParangReadingModule(file_name='parang.dat',
                                     name_in='parang1',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('parang1')

        data = self.pipeline.get_data('header_images/PARANG')
        assert data.dtype == 'float64'
        assert np.allclose(data, np.arange(1., 11., 1.), rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_parang_reading_same(self) -> None:
        """Overwriting with identical values should warn about equal data."""
        module = ParangReadingModule(file_name='parang.dat',
                                     name_in='parang2',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('parang2')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The PARANG attribute is already present and ' \
                                             'contains the same values as are present in ' \
                                             'parang.dat.'

    def test_parang_reading_present(self) -> None:
        """With overwrite=False an existing PARANG attribute must be kept."""
        module = ParangReadingModule(file_name='new.dat',
                                     name_in='parang3',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=False)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('parang3')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The PARANG attribute is already present. Set the ' \
                                             '\'overwrite\' parameter to True in order to ' \
                                             'overwrite the values with new.dat.'

    def test_parang_reading_overwrite(self) -> None:
        """overwrite=True replaces the PARANG values without warnings."""
        module = ParangReadingModule(file_name='new.dat',
                                     name_in='parang4',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('parang4')

    def test_parang_reading_2d(self) -> None:
        """A 2D input file is rejected with a ValueError."""
        module = ParangReadingModule(file_name='data_2d.dat',
                                     name_in='parang6',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=False)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('parang6')

        assert str(error.value) == 'The input file data_2d.dat should contain a 1D data set with ' \
                                   'the parallactic angles.'

    def test_attribute_reading(self) -> None:
        """Read the integer EXP_NO attribute from attribute.dat."""
        module = AttributeReadingModule(file_name='attribute.dat',
                                        attribute='EXP_NO',
                                        name_in='attribute1',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('attribute1')

        data = self.pipeline.get_data('header_images/EXP_NO')
        assert data.dtype == 'int64'
        assert np.allclose(data, np.arange(1, 11, 1), rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_attribute_reading_present(self) -> None:
        """With overwrite=False an existing attribute must be kept."""
        module = AttributeReadingModule(file_name='parang.dat',
                                        attribute='PARANG',
                                        name_in='attribute3',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=False)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('attribute3')

        # FIX: also check the warning count, consistent with the other
        # "already present" tests in this class; previously extra warnings
        # would have gone unnoticed here.
        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The attribute \'PARANG\' is already present. Set ' \
                                             'the \'overwrite\' parameter to True in order to ' \
                                             'overwrite the values with parang.dat.'

    def test_attribute_reading_invalid(self) -> None:
        """An unknown attribute name is rejected with a ValueError."""
        module = AttributeReadingModule(file_name='attribute.dat',
                                        attribute='test',
                                        name_in='attribute4',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=False)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('attribute4')

        assert str(error.value) == '\'test\' is not a valid attribute.'

    def test_attribute_reading_2d(self) -> None:
        """A 2D input file is rejected with a ValueError."""
        module = AttributeReadingModule(file_name='data_2d.dat',
                                        attribute='DITHER_X',
                                        name_in='attribute5',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=False)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('attribute5')

        assert str(error.value) == 'The input file data_2d.dat should contain a 1D list with ' \
                                   'attributes.'

    def test_attribute_reading_same(self) -> None:
        """Overwriting with identical values should warn about equal data."""
        module = AttributeReadingModule(file_name='attribute.dat',
                                        attribute='EXP_NO',
                                        name_in='attribute6',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('attribute6')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The \'EXP_NO\' attribute is already present and ' \
                                             'contains the same values as are present in ' \
                                             'attribute.dat.'

    def test_attribute_reading_overwrite(self) -> None:
        """overwrite=True restores the original PARANG values."""
        module = AttributeReadingModule(file_name='parang.dat',
                                        attribute='PARANG',
                                        name_in='attribute7',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('attribute7')

        attribute = self.pipeline.get_attribute('images', 'PARANG', static=False)
        assert np.allclose(attribute, np.arange(1., 11., 1.), rtol=limit, atol=0.)

    def test_wavelength_reading(self) -> None:
        """Read wavelengths from wavelength.dat into the WAVELENGTH attribute."""
        module = WavelengthReadingModule(file_name='wavelength.dat',
                                         name_in='wavelength1',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelength1')

        data = self.pipeline.get_data('header_images/WAVELENGTH')
        assert data.dtype == 'float64'
        assert np.allclose(data, np.arange(1., 11., 1.), rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_wavelength_reading_same(self) -> None:
        """Overwriting with identical values should warn about equal data."""
        module = WavelengthReadingModule(file_name='wavelength.dat',
                                         name_in='wavelength2',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('wavelength2')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The WAVELENGTH attribute is already present and ' \
                                             'contains the same values as are present in ' \
                                             'wavelength.dat.'

    def test_wavelength_reading_present(self) -> None:
        """With overwrite=False an existing WAVELENGTH attribute must be kept."""
        module = WavelengthReadingModule(file_name='new.dat',
                                         name_in='wavelength3',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=False)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('wavelength3')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The WAVELENGTH attribute is already present. Set ' \
                                             'the \'overwrite\' parameter to True in order to ' \
                                             'overwrite the values with new.dat.'

    def test_wavelength_reading_overwrite(self) -> None:
        """overwrite=True replaces the WAVELENGTH values without warnings."""
        module = WavelengthReadingModule(file_name='new.dat',
                                         name_in='wavelength4',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelength4')

    def test_wavelength_reading_2d(self) -> None:
        """A 2D input file is rejected with a ValueError."""
        module = WavelengthReadingModule(file_name='data_2d.dat',
                                         name_in='wavelength6',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=False)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('wavelength6')

        assert str(error.value) == 'The input file data_2d.dat should contain a 1D data set with ' \
                                   'the wavelengths.'
class TestPypeline(object):
    """Tests for output-port naming and ProcessingModule.apply_function_to_images.

    NOTE(review): methods share one Pypeline and depend on execution order —
    the MEMORY config attribute set in one test is deliberately left in place
    for the next (e.g. MEMORY=0 from `test_apply_function_to_images_same_port`
    is what `test_apply_function_to_images_memory_none` relies on).
    """

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + "/"

        # Seed so the regression means asserted below are reproducible.
        np.random.seed(1)

        image_3d = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        image_2d = np.random.normal(loc=0, scale=2e-4, size=(10, 10))
        science = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        dark = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        # Pre-populate the pipeline database with the four datasets.
        h5f = h5py.File(self.test_dir + "PynPoint_database.hdf5", "w")
        h5f.create_dataset("image_3d", data=image_3d)
        h5f.create_dataset("image_2d", data=image_2d)
        h5f.create_dataset("science", data=science)
        h5f.create_dataset("dark", data=dark)
        h5f.close()

        create_star_data(path=self.test_dir + "images")
        create_config(self.test_dir + "PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=["images"])

    def test_output_port_name(self) -> None:
        """Adding a duplicate output port warns for reading and processing modules."""
        read = FitsReadingModule(name_in="read", input_dir=self.test_dir + "images", image_tag="images")
        read.add_output_port("test")

        with pytest.warns(UserWarning) as warning:
            read.add_output_port("test")

        assert len(warning) == 1
        assert warning[0].message.args[
            0] == "Tag 'test' of ReadingModule 'read' is already used."

        process = BadPixelSigmaFilterModule(name_in="badpixel", image_in_tag="images")
        process.add_output_port("test")

        with pytest.warns(UserWarning) as warning:
            process.add_output_port("test")

        assert len(warning) == 1
        assert warning[0].message.args[0] == "Tag 'test' of ProcessingModule 'badpixel' is " \
                                             "already used."

        # Re-point the module at a fresh database file; adding a port there
        # must succeed without a duplicate-tag warning.
        self.pipeline.m_data_storage.close_connection()
        process._m_data_base = self.test_dir + "database.hdf5"
        process.add_output_port("new")

    def test_apply_function_to_images_3d(self) -> None:
        """Process a 3D dataset in chunks of one image (MEMORY=1)."""
        self.pipeline.set_attribute("config", "MEMORY", 1, static=True)

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in="remove1",
                                   image_in_tag="image_3d",
                                   image_out_tag="remove_3d")

        self.pipeline.add_module(remove)
        self.pipeline.run_module("remove1")

        # Input must be untouched; output loses one column (10 -> 9).
        data = self.pipeline.get_data("image_3d")
        assert np.allclose(np.mean(data), 1.0141852764605783e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        data = self.pipeline.get_data("remove_3d")
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_2d(self) -> None:
        """A single 2D frame is processed the same way as a cube."""
        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in="remove2",
                                   image_in_tag="image_2d",
                                   image_out_tag="remove_2d")

        self.pipeline.add_module(remove)
        self.pipeline.run_module("remove2")

        data = self.pipeline.get_data("image_2d")
        assert np.allclose(np.mean(data), 1.2869483197883442e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 10)

        data = self.pipeline.get_data("remove_2d")
        assert np.allclose(np.mean(data), 1.3957075246029751e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 9)

    def test_apply_function_to_images_same_port(self) -> None:
        """In-place processing (same in/out tag) works when the shape is kept.

        A shape-changing function with identical in/out tags must raise,
        unless MEMORY is None.
        """
        dark = DarkCalibrationModule(name_in="dark1",
                                     image_in_tag="science",
                                     dark_in_tag="dark",
                                     image_out_tag="science")

        self.pipeline.add_module(dark)
        self.pipeline.run_module("dark1")

        data = self.pipeline.get_data("science")
        assert np.allclose(np.mean(data), -3.190113568690675e-06, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        # MEMORY=0: process everything in a single chunk.
        self.pipeline.set_attribute("config", "MEMORY", 0, static=True)

        dark = DarkCalibrationModule(name_in="dark2",
                                     image_in_tag="science",
                                     dark_in_tag="dark",
                                     image_out_tag="science")

        self.pipeline.add_module(dark)
        self.pipeline.run_module("dark2")

        data = self.pipeline.get_data("science")
        assert np.allclose(np.mean(data), -1.026073475228737e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in="remove3",
                                   image_in_tag="remove_3d",
                                   image_out_tag="remove_3d")

        self.pipeline.add_module(remove)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module("remove3")

        assert str(error.value) == "Input and output port have the same tag while the input " \
                                   "function is changing the image shape. This is only " \
                                   "possible with MEMORY=None."

    def test_apply_function_to_images_memory_none(self) -> None:
        """Shape-changing function succeeds with unlimited memory.

        NOTE(review): relies on MEMORY=0 set by the previous test.
        """
        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in="remove4",
                                   image_in_tag="image_3d",
                                   image_out_tag="remove_3d_none")

        self.pipeline.add_module(remove)
        self.pipeline.run_module("remove4")

        data = self.pipeline.get_data("remove_3d_none")
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_3d_args(self) -> None:
        """Function with extra arguments: scale a cube and adjust PIXSCALE."""
        self.pipeline.set_attribute("config", "MEMORY", 1, static=True)
        self.pipeline.set_attribute("image_3d", "PIXSCALE", 0.1, static=True)

        scale = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                  pixscale=True,
                                  name_in="scale1",
                                  image_in_tag="image_3d",
                                  image_out_tag="scale_3d")

        self.pipeline.add_module(scale)
        self.pipeline.run_module("scale1")

        data = self.pipeline.get_data("scale_3d")
        assert np.allclose(np.mean(data), 7.042953308754017e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 12, 12)

        # 0.1 / 1.2 — pixel scale shrinks with the spatial up-scaling.
        attribute = self.pipeline.get_attribute("scale_3d", "PIXSCALE", static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)

    def test_apply_function_to_images_2d_args(self) -> None:
        """Same as the 3D case but for a single 2D frame."""
        self.pipeline.set_attribute("image_2d", "PIXSCALE", 0.1, static=True)

        scale = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                  pixscale=True,
                                  name_in="scale2",
                                  image_in_tag="image_2d",
                                  image_out_tag="scale_2d")

        self.pipeline.add_module(scale)
        self.pipeline.run_module("scale2")

        data = self.pipeline.get_data("scale_2d")
        assert np.allclose(np.mean(data), 8.937141109641279e-05, rtol=limit, atol=0.)
        assert data.shape == (12, 12)

        attribute = self.pipeline.get_attribute("scale_2d", "PIXSCALE", static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)
class TestPsfSubtraction:
    """Tests for classical ADI and PCA-based PSF subtraction.

    NOTE(review): the methods share one Pypeline and run in definition order;
    several multi-processing tests compare against results produced by the
    earlier single-processing tests.
    """

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + '/'

        # Science data with a fake companion (sep=10 px, contrast=3e-3) ...
        create_fake(path=self.test_dir + 'science',
                    ndit=[20, 20, 20, 20],
                    nframes=[20, 20, 20, 20],
                    exp_no=[1, 2, 3, 4],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=10.,
                    contrast=3e-3)

        # ... and a companion-free reference dataset with half the frames.
        create_fake(path=self.test_dir + 'reference',
                    ndit=[10, 10, 10, 10],
                    nframes=[10, 10, 10, 10],
                    exp_no=[1, 2, 3, 4],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['science', 'reference'])

    def test_read_data(self) -> None:
        """Read the science and reference FITS data, pin regression values."""
        module = FitsReadingModule(name_in='read1',
                                   image_tag='science',
                                   input_dir=self.test_dir + 'science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('science')
        assert np.allclose(data[0, 50, 50], 0.09798413502193708, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010063896953157961, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='reference',
                                   input_dir=self.test_dir + 'reference')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('reference')
        assert np.allclose(data[0, 50, 50], 0.09798413502193708, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_angle_interpolation(self) -> None:
        """Interpolate the parallactic angles of the science data."""
        module = AngleInterpolationModule(name_in='angle',
                                          data_tag='science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        data = self.pipeline.get_data('header_science/PARANG')
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 19.736842105263158, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

    def test_psf_preparation(self) -> None:
        """Apply the central/edge mask to science and reference data."""
        module = PSFpreparationModule(name_in='prep1',
                                      image_in_tag='science',
                                      image_out_tag='science_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.2,
                                      edge_size=1.0)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep1')

        data = self.pipeline.get_data('science_prep')
        # Corners are masked to zero; the off-center pixel keeps its value.
        assert np.allclose(data[0, 0, 0], 0.0, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0.0, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 4.534001223501053e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        module = PSFpreparationModule(name_in='prep2',
                                      image_in_tag='reference',
                                      image_out_tag='reference_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.2,
                                      edge_size=1.0)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep2')

        data = self.pipeline.get_data('reference_prep')
        assert np.allclose(data[0, 0, 0], 0.0, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0.0, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.227592050148539e-07, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_classical_adi(self) -> None:
        """Classical ADI without a frame-selection threshold."""
        module = ClassicalADIModule(threshold=None,
                                    nreference=None,
                                    residuals='mean',
                                    extra_rot=0.,
                                    name_in='cadi1',
                                    image_in_tag='science',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi1')

        data = self.pipeline.get_data('cadi_res')
        assert np.allclose(np.mean(data), -6.359018260066029e-08, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('cadi_stack')
        assert np.allclose(np.mean(data), -8.318786331552922e-08, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_classical_adi_threshold(self) -> None:
        """Classical ADI with a rotation threshold and median residuals."""
        module = ClassicalADIModule(threshold=(0.1, 0.03, 1.),
                                    nreference=5,
                                    residuals='median',
                                    extra_rot=0.,
                                    name_in='cadi2',
                                    image_in_tag='science',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi2')

        data = self.pipeline.get_data('cadi_res')
        assert np.allclose(np.mean(data), 1.6523183877608216e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('cadi_stack')
        assert np.allclose(np.mean(data), 1.413437242880268e-07, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_psf_subtraction_pca_single(self) -> None:
        """Full PCA PSF subtraction, single process, all output tags."""
        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_single',
            images_in_tag='science',
            reference_in_tag='science',
            res_mean_tag='res_mean_single',
            res_median_tag='res_median_single',
            res_weighted_tag='res_weighted_single',
            res_rot_mean_clip_tag='res_clip_single',
            res_arr_out_tag='res_arr_single',
            basis_out_tag='basis_single',
            extra_rot=-15.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_single')

        data = self.pipeline.get_data('res_mean_single')
        assert np.allclose(np.mean(data), 2.6959819771522928e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_median_single')
        assert np.allclose(np.mean(data), -2.4142571236920345e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_weighted_single')
        assert np.allclose(np.mean(data), -5.293559651636843e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_clip_single')
        assert np.allclose(np.mean(data), 2.6199554737979536e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        # Per-component residual array for 5 principal components.
        data = self.pipeline.get_data('res_arr_single5')
        assert np.allclose(np.mean(data), 3.184676024912723e-08, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('basis_single')
        assert np.allclose(np.mean(data), -1.593245396350998e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_no_mean(self) -> None:
        """PCA without subtracting the mean image before the decomposition."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean')

        data = self.pipeline.get_data('res_mean_no_mean')
        assert np.allclose(np.mean(data), 2.413203757426239e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_no_mean')
        assert np.allclose(np.mean(data), 7.4728664805632875e-06, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref(self) -> None:
        """PCA with a separate reference dataset."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref')

        data = self.pipeline.get_data('res_mean_ref')
        assert np.allclose(np.mean(data), 1.1662201512335965e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref')
        assert np.allclose(np.mean(data), -1.6780507257603104e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_no_mean(self) -> None:
        """PCA with a reference dataset and no mean subtraction."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean')

        data = self.pipeline.get_data('res_mean_ref_no_mean')
        assert np.allclose(np.mean(data), 3.7029738044199534e-07, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_no_mean')
        assert np.allclose(np.mean(data), 2.3755682312090375e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_pca_single_mask(self) -> None:
        """PCA on the masked (prepared) data, single process, all output tags."""
        pca = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_single_mask',
            images_in_tag='science_prep',
            reference_in_tag='science_prep',
            res_mean_tag='res_mean_single_mask',
            res_median_tag='res_median_single_mask',
            res_weighted_tag='res_weighted_single_mask',
            res_rot_mean_clip_tag='res_clip_single_mask',
            res_arr_out_tag='res_arr_single_mask',
            basis_out_tag='basis_single_mask',
            extra_rot=-15.,
            subtract_mean=True)

        self.pipeline.add_module(pca)
        self.pipeline.run_module('pca_single_mask')

        data = self.pipeline.get_data('res_mean_single_mask')
        assert np.allclose(np.mean(data), -1.6536519510012155e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_median_single_mask')
        assert np.allclose(np.mean(data), 5.6094356668078245e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_weighted_single_mask')
        assert np.allclose(np.mean(data), 4.7079857263662695e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_clip_single_mask')
        assert np.allclose(np.mean(data), -4.875856901892831e-10, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_arr_single_mask5')
        assert np.allclose(np.mean(data), -1.700674890172441e-09, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('basis_single_mask')
        assert np.allclose(np.mean(data), 5.584100479595007e-06, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_no_mean_mask(self) -> None:
        """PCA on masked data without mean subtraction."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_no_mean_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_no_mean_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean_mask',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean_mask')

        data = self.pipeline.get_data('res_mean_no_mean_mask')
        assert np.allclose(np.mean(data), -1.0905008724474168e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        # Sign of individual basis vectors is not fixed, so compare the
        # absolute sum instead of the mean.
        data = self.pipeline.get_data('basis_no_mean_mask')
        assert np.allclose(np.sum(np.abs(data)), 1025.2018448288406, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_mask(self) -> None:
        """PCA on masked data with a masked reference dataset."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='reference_prep',
                                         res_mean_tag='res_mean_ref_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_mask',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_mask')

        data = self.pipeline.get_data('res_mean_ref_mask')
        assert np.allclose(np.mean(data), -9.962692629500833e-10, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_mask')
        assert np.allclose(np.mean(data), -2.3165670099810983e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_no_mean_mask(self) -> None:
        """PCA on masked data, masked reference, no mean subtraction."""
        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_ref_no_mean_mask',
            images_in_tag='science_prep',
            reference_in_tag='reference_prep',
            res_mean_tag='res_mean_ref_no_mean_mask',
            res_median_tag=None,
            res_weighted_tag=None,
            res_rot_mean_clip_tag=None,
            res_arr_out_tag=None,
            basis_out_tag='basis_ref_no_mean_mask',
            extra_rot=0.,
            subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean_mask')

        data = self.pipeline.get_data('res_mean_ref_no_mean_mask')
        assert np.allclose(np.mean(data), 3.848255803450399e-07, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_no_mean_mask')
        assert np.allclose(np.sum(np.abs(data)), 1026.3329224435665, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_pca_multi(self) -> None:
        """Multi-processing PCA must match the single-processing results."""
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_multi',
            images_in_tag='science',
            reference_in_tag='science',
            res_mean_tag='res_mean_multi',
            res_median_tag='res_median_multi',
            res_weighted_tag='res_weighted_multi',
            res_rot_mean_clip_tag='res_clip_multi',
            res_arr_out_tag=None,
            basis_out_tag='basis_multi',
            extra_rot=-15.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi')

        data_single = self.pipeline.get_data('res_mean_single')
        data_multi = self.pipeline.get_data('res_mean_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_median_single')
        data_multi = self.pipeline.get_data('res_median_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_weighted_single')
        data_multi = self.pipeline.get_data('res_weighted_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('basis_single')
        data_multi = self.pipeline.get_data('basis_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-5, atol=0.)
        assert data_single.shape == data_multi.shape

    def test_psf_subtraction_pca_multi_mask(self) -> None:
        """Multi-processing PCA on masked data must match single-processing."""
        # FIX: use a context manager — the file handle was previously opened
        # with h5py.File(...) and never closed (resource leak).
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_multi_mask',
            images_in_tag='science_prep',
            reference_in_tag='science_prep',
            res_mean_tag='res_mean_multi_mask',
            res_median_tag='res_median_multi_mask',
            res_weighted_tag='res_weighted_multi_mask',
            res_rot_mean_clip_tag='res_clip_multi_mask',
            res_arr_out_tag=None,
            basis_out_tag='basis_multi_mask',
            extra_rot=-15.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi_mask')

        # Compare only clearly non-zero pixels to skip the masked region.
        data_single = self.pipeline.get_data('res_mean_single_mask')
        data_multi = self.pipeline.get_data('res_mean_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_median_single_mask')
        data_multi = self.pipeline.get_data('res_median_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_weighted_single_mask')
        data_multi = self.pipeline.get_data('res_weighted_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('basis_single_mask')
        data_multi = self.pipeline.get_data('basis_multi_mask')
        assert np.allclose(data_single, data_multi, rtol=1e-5, atol=0.)
        assert data_single.shape == data_multi.shape

    def test_psf_subtraction_len_parang(self) -> None:
        """Mismatch between image count and PARANG length raises a ValueError."""
        # FIX: use a context manager — the file handle was previously never
        # closed (resource leak).
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        # Append one extra angle so 80 images face 81 angles.
        parang = self.pipeline.get_data('header_science/PARANG')
        self.pipeline.set_attribute('science_prep', 'PARANG',
                                    np.append(parang, 0.), static=False)

        module = PcaPsfSubtractionModule(pca_numbers=[5, ],
                                         name_in='pca_len_parang',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_len_parang',
                                         extra_rot=0.)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('pca_len_parang')

        assert str(error.value) == 'The number of images (80) is not equal to the number of ' \
                                   'parallactic angles (81).'
class TestNearInitModule:
    """
    Tests for NearReadingModule: reading chopped NEAR/VISIR FITS data into
    the 'chopa' and 'chopb' tags, including the subtract/crop/combine options
    and the warning/error paths for malformed FITS headers.

    Note: the Python-2-style ``(object)`` base was removed for consistency
    with the other test classes in this file.
    """

    def setup_class(self) -> None:
        """Create the NEAR test data, the configuration, and the pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'
        self.fitsfile = self.test_dir + 'near/images_1.fits'

        create_near_data(path=self.test_dir + 'near')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

        # Map the configuration attributes to the NEAR-specific FITS keywords.
        self.pipeline.set_attribute('config', 'NFRAMES', 'ESO DET CHOP NCYCLES', static=True)
        self.pipeline.set_attribute('config', 'EXP_NO', 'ESO TPL EXPNO', static=True)
        self.pipeline.set_attribute('config', 'NDIT', 'None', static=True)
        self.pipeline.set_attribute('config', 'PARANG_START', 'None', static=True)
        self.pipeline.set_attribute('config', 'PARANG_END', 'None', static=True)
        self.pipeline.set_attribute('config', 'DITHER_X', 'None', static=True)
        self.pipeline.set_attribute('config', 'DITHER_Y', 'None', static=True)
        self.pipeline.set_attribute('config', 'PIXSCALE', 0.045, static=True)
        self.pipeline.set_attribute('config', 'MEMORY', 100, static=True)

        # Output tags for the two chop positions.
        self.positions = ('chopa', 'chopb')

    def teardown_class(self) -> None:
        """Remove the test data and database files."""

        remove_test_data(self.test_dir, folders=['near'])

    def test_near_read(self) -> None:
        """Plain read: both chop positions get all 20 frames of 10x10 pixels."""

        module = NearReadingModule(name_in='read1a',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1a')

        for item in self.positions:
            data = self.pipeline.get_data(item)
            assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
            assert data.shape == (20, 10, 10)

    def test_near_subtract_crop_mean(self) -> None:
        """Subtracting the chop pair and mean-combining gives zero-mean 7x7 cubes."""

        module = NearReadingModule(name_in='read1b',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1],
                                   subtract=True,
                                   crop=(None, None, 0.3),
                                   combine='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1b')

        data = self.pipeline.get_data(self.positions[0])
        assert np.allclose(np.mean(data), 0.0, rtol=limit, atol=0.)
        assert data.shape == (4, 7, 7)

        data = self.pipeline.get_data(self.positions[1])
        assert np.allclose(np.mean(data), 0.0, rtol=limit, atol=0.)
        assert data.shape == (4, 7, 7)

    def test_near_median(self) -> None:
        """Median-combining collapses each FITS file to one image per chop."""

        module = NearReadingModule(name_in='read1c',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1],
                                   combine='median')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1c')

        data = self.pipeline.get_data(self.positions[0])
        assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        data = self.pipeline.get_data(self.positions[1])
        assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

    def test_static_not_found(self) -> None:
        """A bogus static-attribute keyword triggers one warning per FITS file pair."""

        self.pipeline.set_attribute('config', 'DIT', 'Test', static=True)

        module = NearReadingModule(name_in='read2',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read2')

        assert len(warning) == 8

        for item in warning:
            assert item.message.args[0] == 'Static attribute DIT (=Test) not found in the FITS ' \
                                           'header.'

        # Restore the valid keyword for subsequent tests.
        self.pipeline.set_attribute('config', 'DIT', 'ESO DET SEQ1 DIT', static=True)

    def test_nonstatic_not_found(self) -> None:
        """A bogus non-static attribute keyword triggers the same warning pattern."""

        self.pipeline.set_attribute('config', 'NDIT', 'Test', static=True)

        module = NearReadingModule(name_in='read3',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read3')

        assert len(warning) == 8

        for item in warning:
            assert item.message.args[0] == 'Non-static attribute NDIT (=Test) not found in the ' \
                                           'FITS header.'

        # Restore the default for subsequent tests.
        self.pipeline.set_attribute('config', 'NDIT', 'None', static=True)

    def test_check_header(self) -> None:
        """Non-nominal chopping keywords each produce a dedicated warning."""

        with fits.open(self.fitsfile) as hdulist:
            hdulist[0].header['ESO DET CHOP ST'] = 'F'
            hdulist[0].header['ESO DET CHOP CYCSKIP'] = 1
            hdulist[0].header['ESO DET CHOP CYCSUM'] = 'T'
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read4',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read4')

        assert len(warning) == 3

        assert warning[0].message.args[0] == 'Dataset was obtained without chopping.'
        assert warning[1].message.args[0] == 'Chop cycles (1) have been skipped.'
        assert warning[2].message.args[0] == 'FITS file contains averaged images.'

        # Restore the nominal header values.
        with fits.open(self.fitsfile) as hdulist:
            hdulist[0].header['ESO DET CHOP ST'] = 'T'
            hdulist[0].header['ESO DET CHOP CYCSKIP'] = 0
            hdulist[0].header['ESO DET CHOP CYCSUM'] = 'F'
            hdulist.writeto(self.fitsfile, overwrite=True)

    def test_frame_type_invalid(self) -> None:
        """An unexpected frame-type value raises a ValueError."""

        with fits.open(self.fitsfile) as hdulist:
            hdulist[10].header['ESO DET FRAM TYPE'] = 'Test'
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read5',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('read5')

        assert str(error.value) == 'Frame type (Test) not a valid value. Expecting HCYCLE1 or ' \
                                   'HCYCLE2 as value for ESO DET FRAM TYPE.'

    def test_frame_type_missing(self) -> None:
        """A missing frame-type keyword raises a ValueError with the image index."""

        with fits.open(self.fitsfile) as hdulist:
            hdulist[10].header.remove('ESO DET FRAM TYPE')
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read6',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('read6')

        assert str(error.value) == 'Frame type not found in the FITS header. Image number: 9.'

    def test_same_cycle(self) -> None:
        """Two consecutive images at the same chop position: skip and warn."""

        with fits.open(self.fitsfile) as hdulist:

            # Setting a long HIERARCH keyword makes astropy emit a warning.
            with pytest.warns(UserWarning) as warning:
                hdulist[10].header['ESO DET FRAM TYPE'] = 'HCYCLE1'

            assert len(warning) == 1
            assert warning[0].message.args[0] == 'Keyword name \'ESO DET FRAM TYPE\' is greater ' \
                                                 'than 8 characters or contains characters not ' \
                                                 'allowed by the FITS standard; a HIERARCH card ' \
                                                 'will be created.'

            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read7',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read7')

        assert len(warning) == 2

        assert warning[0].message.args[0] == 'Previous and current chop position (HCYCLE1) are ' \
                                             'the same. Skipping the current image.'
        assert warning[1].message.args[0] == 'The number of images is not equal for chop A and ' \
                                             'chop B.'

    def test_odd_number_images(self) -> None:
        """Deleting one HDU leaves an odd image count: warn and report cycle mismatch."""

        with fits.open(self.fitsfile) as hdulist:
            del hdulist[11]
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read8',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read8')

        assert len(warning) == 2

        assert warning[0].message.args[0] == f'FITS file contains odd number of images: ' \
                                             f'{self.fitsfile}'
        assert warning[1].message.args[0] == 'The number of chop cycles (5) is not equal to ' \
                                             'half the number of available HDU images (4).'
class TestFrameSelection:
    """
    Tests for the frame-selection processing modules: removing frames,
    selecting by statistics/similarity/attributes, and image statistics.

    Note: the misspelled test name ``test_image_statistics_posiiton`` was
    corrected to ``test_image_statistics_position``.
    """

    def setup_class(self) -> None:
        """Create the star test data, the configuration, and the pipeline."""

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the test data and database files."""

        remove_test_data(self.test_dir, folders=['images'])

    def test_read_data(self) -> None:
        """Read the images and prepare the NDIT attribute for later tests."""

        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir + 'images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        attr = self.pipeline.get_attribute('read', 'NDIT', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('read', 'NFRAMES', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        # Pretend only 4 of the 5 frames per cube were requested, so that
        # RemoveLastFrameModule has a frame to drop in the next test.
        self.pipeline.set_attribute('read', 'NDIT', [4, 4], static=False)

    def test_remove_last_frame(self) -> None:
        """Drop the last frame of each cube: 10 -> 8 frames."""

        module = RemoveLastFrameModule(name_in='last',
                                       image_in_tag='read',
                                       image_out_tag='last')

        self.pipeline.add_module(module)
        self.pipeline.run_module('last')

        data = self.pipeline.get_data('last')
        assert np.sum(data) == pytest.approx(84.68885503527224, rel=self.limit, abs=0.)
        assert data.shape == (8, 11, 11)

        # Attach attributes that the later selection tests operate on.
        self.pipeline.set_attribute('last', 'PARANG', np.arange(8.), static=False)
        self.pipeline.set_attribute('last', 'STAR_POSITION', np.full((8, 2), 5.), static=False)

        attr = self.pipeline.get_attribute('last', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(28., rel=self.limit, abs=0.)
        assert attr.shape == (8, )

        attr = self.pipeline.get_attribute('last', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(80., rel=self.limit, abs=0.)
        assert attr.shape == (8, 2)

    def test_remove_start_frame(self) -> None:
        """Drop the first frame of each cube: 8 -> 6 frames."""

        module = RemoveStartFramesModule(frames=1,
                                         name_in='start',
                                         image_in_tag='last',
                                         image_out_tag='start')

        self.pipeline.add_module(module)
        self.pipeline.run_module('start')

        data = self.pipeline.get_data('start')
        assert np.sum(data) == pytest.approx(64.44307047549808, rel=self.limit, abs=0.)
        assert data.shape == (6, 11, 11)

        attr = self.pipeline.get_attribute('start', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(24., rel=self.limit, abs=0.)
        assert attr.shape == (6, )

        attr = self.pipeline.get_attribute('start', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(60., rel=self.limit, abs=0.)
        assert attr.shape == (6, 2)

    def test_remove_frames(self) -> None:
        """Remove frames 2 and 5 by index and check both output tags."""

        module = RemoveFramesModule(name_in='remove',
                                    image_in_tag='start',
                                    selected_out_tag='selected',
                                    removed_out_tag='removed',
                                    frames=[2, 5])

        self.pipeline.add_module(module)
        self.pipeline.run_module('remove')

        data = self.pipeline.get_data('selected')
        assert np.sum(data) == pytest.approx(43.68337741822863, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('removed')
        assert np.sum(data) == pytest.approx(20.759693057269445, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        attr = self.pipeline.get_attribute('selected', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(14., rel=self.limit, abs=0.)
        assert attr.shape == (4, )

        attr = self.pipeline.get_attribute('selected', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(40., rel=self.limit, abs=0.)
        assert attr.shape == (4, 2)

        attr = self.pipeline.get_attribute('removed', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(10., rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('removed', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(20., rel=self.limit, abs=0.)
        assert attr.shape == (2, 2)

    def test_frame_selection(self) -> None:
        """Exercise the 'median', 'max', and 'range' selection methods."""

        module = FrameSelectionModule(name_in='select1',
                                      image_in_tag='start',
                                      selected_out_tag='selected1',
                                      removed_out_tag='removed1',
                                      index_out_tag='index1',
                                      method='median',
                                      threshold=2.,
                                      fwhm=0.1,
                                      aperture=('circular', 0.1),
                                      position=(None, None, 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('select1')

        data = self.pipeline.get_data('selected1')
        assert np.sum(data) == pytest.approx(54.58514780071149, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

        data = self.pipeline.get_data('removed1')
        assert np.sum(data) == pytest.approx(9.857922674786586, rel=self.limit, abs=0.)
        assert data.shape == (1, 11, 11)

        data = self.pipeline.get_data('index1')
        assert np.sum(data) == pytest.approx(5, rel=self.limit, abs=0.)
        assert data.shape == (1, )

        attr = self.pipeline.get_attribute('selected1', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(17., rel=self.limit, abs=0.)
        assert attr.shape == (5, )

        attr = self.pipeline.get_attribute('selected1', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(50, rel=self.limit, abs=0.)
        assert attr.shape == (5, 2)

        attr = self.pipeline.get_attribute('removed1', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(7., rel=self.limit, abs=0.)
        assert attr.shape == (1, )

        attr = self.pipeline.get_attribute('removed1', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (1, 2)

        module = FrameSelectionModule(name_in='select2',
                                      image_in_tag='start',
                                      selected_out_tag='selected2',
                                      removed_out_tag='removed2',
                                      index_out_tag='index2',
                                      method='max',
                                      threshold=1.,
                                      fwhm=0.1,
                                      aperture=('annulus', 0.05, 0.1),
                                      position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('select2')

        data = self.pipeline.get_data('selected2')
        assert np.sum(data) == pytest.approx(21.42652724866543, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        data = self.pipeline.get_data('removed2')
        assert np.sum(data) == pytest.approx(43.016543226832646, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('index2')
        assert np.sum(data) == pytest.approx(10, rel=self.limit, abs=0.)
        assert data.shape == (4, )

        attr = self.pipeline.get_attribute('selected2', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(8., rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('selected2', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(20, rel=self.limit, abs=0.)
        assert attr.shape == (2, 2)

        attr = self.pipeline.get_attribute('removed2', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(16., rel=self.limit, abs=0.)
        assert attr.shape == (4, )

        attr = self.pipeline.get_attribute('removed2', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(40, rel=self.limit, abs=0.)
        assert attr.shape == (4, 2)

        module = FrameSelectionModule(name_in='select3',
                                      image_in_tag='start',
                                      selected_out_tag='selected3',
                                      removed_out_tag='removed3',
                                      index_out_tag='index3',
                                      method='range',
                                      threshold=(10., 10.7),
                                      fwhm=0.1,
                                      aperture=('circular', 0.1),
                                      position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('select3')

        data = self.pipeline.get_data('selected3')
        assert np.sum(data) == pytest.approx(22.2568501695632, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        data = self.pipeline.get_data('removed3')
        assert np.sum(data) == pytest.approx(42.18622030593487, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('index3')
        assert np.sum(data) == pytest.approx(12, rel=self.limit, abs=0.)
        assert data.shape == (4, )

    def test_image_statistics_full(self) -> None:
        """Statistics over the full image frames."""

        module = ImageStatisticsModule(name_in='stat1',
                                       image_in_tag='read',
                                       stat_out_tag='stat1',
                                       position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat1')

        data = self.pipeline.get_data('stat1')
        assert np.sum(data) == pytest.approx(115.68591492205017, rel=self.limit, abs=0.)
        assert data.shape == (10, 6)

    def test_image_statistics_position(self) -> None:
        """Statistics within an aperture at a fixed position. (Name typo fixed.)"""

        module = ImageStatisticsModule(name_in='stat2',
                                       image_in_tag='read',
                                       stat_out_tag='stat2',
                                       position=(5, 5, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat2')

        data = self.pipeline.get_data('stat2')
        assert np.sum(data) == pytest.approx(118.7138708968444, rel=self.limit, abs=0.)
        assert data.shape == (10, 6)

    def test_frame_similarity_mse(self) -> None:
        """MSE similarity values are stored as a non-static attribute."""

        module = FrameSimilarityModule(name_in='simi1',
                                       image_tag='read',
                                       method='MSE',
                                       mask_radius=(0., 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi1')

        attr = self.pipeline.get_attribute('read', 'MSE', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(0.11739141370277852, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_frame_similarity_pcc(self) -> None:
        """PCC similarity values are stored as a non-static attribute."""

        module = FrameSimilarityModule(name_in='simi2',
                                       image_tag='read',
                                       method='PCC',
                                       mask_radius=(0., 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi2')

        attr = self.pipeline.get_attribute('read', 'PCC', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(9.134820985662829, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_frame_similarity_ssim(self) -> None:
        """SSIM similarity values are stored as a non-static attribute."""

        module = FrameSimilarityModule(name_in='simi3',
                                       image_tag='read',
                                       method='SSIM',
                                       mask_radius=(0., 0.2),
                                       temporal_median='constant')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi3')

        attr = self.pipeline.get_attribute('read', 'SSIM', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(9.096830542868524, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_select_by_attribute(self) -> None:
        """Keep the 6 frames with the highest SSIM, in descending order."""

        self.pipeline.set_attribute('read', 'INDEX', np.arange(44), static=False)

        module = SelectByAttributeModule(name_in='frame_removal_1',
                                         image_in_tag='read',
                                         attribute_tag='SSIM',
                                         number_frames=6,
                                         order='descending',
                                         selected_out_tag='select_sim',
                                         removed_out_tag='remove_sim')

        self.pipeline.add_module(module)
        self.pipeline.run_module('frame_removal_1')

        attr = self.pipeline.get_attribute('select_sim', 'INDEX', static=False)
        assert np.sum(attr) == pytest.approx(946, rel=self.limit, abs=0.)
        assert attr.shape == (44, )

        attr = self.pipeline.get_attribute('select_sim', 'SSIM', static=False)
        assert np.sum(attr) == pytest.approx(5.556889532446573, rel=self.limit, abs=0.)
        assert attr.shape == (6, )

        attr = self.pipeline.get_attribute('remove_sim', 'SSIM', static=False)
        assert np.sum(attr) == pytest.approx(3.539941010421951, rel=self.limit, abs=0.)
        assert attr.shape == (4, )

    def test_residual_selection(self) -> None:
        """Select 80 per cent of the frames based on annulus residuals."""

        module = ResidualSelectionModule(name_in='residual_select',
                                         image_in_tag='start',
                                         selected_out_tag='res_selected',
                                         removed_out_tag='res_removed',
                                         percentage=80.,
                                         annulus_radii=(0.1, 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('residual_select')

        data = self.pipeline.get_data('res_selected')
        assert np.sum(data) == pytest.approx(41.77295229983322, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('res_removed')
        assert np.sum(data) == pytest.approx(22.670118175664847, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)
class TestExtract:
    """
    Tests for the star/binary extraction modules (StarExtractionModule and
    ExtractBinaryModule) on synthetic star and fake-companion data.
    """

    def setup_class(self) -> None:
        """Create the star and binary test data, the configuration, and the pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'

        # Off-center star at (10, 10) in 51x51 images.
        create_star_data(path=self.test_dir + 'star',
                         npix_x=51,
                         npix_y=51,
                         x0=[10., 10., 10., 10.],
                         y0=[10., 10., 10., 10.])

        # Centered star at (50, 50) with an equal-brightness companion at 20 pix.
        create_fake(path=self.test_dir + 'binary',
                    ndit=[20, 20, 20, 20],
                    nframes=[20, 20, 20, 20],
                    exp_no=[1, 2, 3, 4],
                    npix=(101, 101),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=20.,
                    contrast=1.)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the test data and database files."""

        remove_test_data(path=self.test_dir, folders=['star', 'binary'])

    def test_read_data(self) -> None:
        """Read both datasets and pin their pixel values, means, and shapes."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='star',
                                   input_dir=self.test_dir + 'star',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('star')
        assert np.allclose(data[0, 10, 10], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00038538535294683216, rtol=limit, atol=0.)
        assert data.shape == (40, 51, 51)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='binary',
                                   input_dir=self.test_dir + 'binary',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('binary')
        assert np.allclose(data[0, 50, 50], 0.0986064357966972, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00019636787665654158, rtol=limit, atol=0.)
        assert data.shape == (80, 101, 101)

    def test_angle_interpolation(self) -> None:
        """Interpolate the parallactic angles, then flip their sign in place."""

        module = AngleInterpolationModule(name_in='angle',
                                          data_tag='binary')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        data = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        assert data[5] == 6.578947368421053
        assert np.allclose(np.mean(data), 50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

        # Negate the angles so the companion rotates in the expected direction
        # for the extraction tests below.
        parang = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        self.pipeline.set_attribute('binary', 'PARANG', -1. * parang, static=False)

        data = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        assert data[5] == -6.578947368421053
        assert np.allclose(np.mean(data), -50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

    def test_extract_position_none(self) -> None:
        """Extraction with position=None locates the brightest pixel itself."""

        module = StarExtractionModule(name_in='extract1',
                                      image_in_tag='star',
                                      image_out_tag='extract1',
                                      index_out_tag='index',
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        # No frames are rejected, so the 'index' dataset stays empty.
        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract1')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The new dataset that is stored under the tag name ' \
                                             '\'index\' is empty.'

        data = self.pipeline.get_data('extract1')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract1', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_center_none(self) -> None:
        """A position tuple with (None, None, radius) searches around the image center."""

        module = StarExtractionModule(name_in='extract2',
                                      image_in_tag='star',
                                      image_out_tag='extract2',
                                      index_out_tag='index',
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=(None, None, 1.))

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract2')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The new dataset that is stored under the tag name ' \
                                             '\'index\' is empty.'

        data = self.pipeline.get_data('extract2')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract2', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_position(self) -> None:
        """An explicit (x, y, radius) position gives the same extraction result."""

        module = StarExtractionModule(name_in='extract7',
                                      image_in_tag='star',
                                      image_out_tag='extract7',
                                      index_out_tag=None,
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=(10, 10, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract7')

        data = self.pipeline.get_data('extract7')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract7', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_too_large(self) -> None:
        """A crop size too large around (10, 10) falls back to the image center with a warning per frame."""

        module = StarExtractionModule(name_in='extract3',
                                      image_in_tag='star',
                                      image_out_tag='extract3',
                                      index_out_tag=None,
                                      image_size=0.8,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract3')

        assert len(warning) == 40

        for i, item in enumerate(warning):
            assert item.message.args[0] == f'Chosen image size is too large to crop the image ' \
                                           f'around the brightest pixel (image index = {i}, ' \
                                           f'pixel [x, y] = [10, 10]). Using the center of ' \
                                           f'the image instead.'

        data = self.pipeline.get_data('extract3')
        assert np.allclose(data[0, 0, 0], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0004499242959139202, rtol=limit, atol=0.)
        assert data.shape == (40, 31, 31)

        attr = self.pipeline.get_attribute('extract3', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 25

    def test_star_extract_cpu(self) -> None:
        """With 4 CPUs the fallback warning is emitted once (no per-frame index)."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = StarExtractionModule(name_in='extract4',
                                      image_in_tag='star',
                                      image_out_tag='extract4',
                                      index_out_tag='index',
                                      image_size=0.8,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract4')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Chosen image size is too large to crop the image ' \
                                             'around the brightest pixel. Using the center of ' \
                                             'the image instead.'

    def test_extract_binary(self) -> None:
        """Extract the rotating companion from the binary dataset."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = ExtractBinaryModule(pos_center=(50., 50.),
                                     pos_binary=(50., 70.),
                                     name_in='extract5',
                                     image_in_tag='binary',
                                     image_out_tag='extract5',
                                     image_size=0.5,
                                     search_size=0.2,
                                     filter_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract5')

        data = self.pipeline.get_data('extract5')
        assert np.allclose(data[0, 9, 9], 0.09774483733119443, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0027700881940171283, rtol=limit, atol=0.)
        assert data.shape == (80, 19, 19)

    def test_extract_binary_filter(self) -> None:
        """Same extraction with Gaussian pre-filtering of the search region."""

        module = ExtractBinaryModule(pos_center=(50., 50.),
                                     pos_binary=(50., 70.),
                                     name_in='extract6',
                                     image_in_tag='binary',
                                     image_out_tag='extract6',
                                     image_size=0.5,
                                     search_size=0.2,
                                     filter_size=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract6')

        data = self.pipeline.get_data('extract6')
        assert np.allclose(data[0, 9, 9], 0.09774483733119443, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.002770040591615301, rtol=limit, atol=0.)
        assert data.shape == (80, 19, 19)
class TestHdf5ReadingModule:
    """
    Tests for Hdf5ReadingModule: importing datasets from an external
    PynPoint HDF5 database, with and without a tag dictionary.
    """

    def setup_class(self) -> None:
        """Create a random dataset, the configuration, and the pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'

        create_random(self.test_dir + 'data')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the test data and database files."""

        remove_test_data(self.test_dir, folders=['data'])

    def test_hdf5_reading(self) -> None:
        """Read only the 'images' dataset selected through the tag dictionary."""

        data = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        # Add an extra dataset (plus its attribute group) that the tag
        # dictionary below deliberately does not select.
        with h5py.File(self.test_dir + 'data/PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file.create_dataset('extra', data=data)
            hdf_file.create_dataset('header_extra/PARANG', data=[1., 2., 3., 4.])

        read = Hdf5ReadingModule(name_in='read1',
                                 input_filename='PynPoint_database.hdf5',
                                 input_dir=self.test_dir + 'data',
                                 tag_dictionary={'images': 'images'})

        self.pipeline.add_module(read)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

    def test_dictionary_none(self) -> None:
        """With tag_dictionary=None all datasets are imported under their own tags."""

        read = Hdf5ReadingModule(name_in='read2',
                                 input_filename='PynPoint_database.hdf5',
                                 input_dir=self.test_dir + 'data',
                                 tag_dictionary=None)

        self.pipeline.add_module(read)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

    def test_wrong_tag(self) -> None:
        """A tag that does not exist in the HDF5 file produces a warning and no import."""

        read = Hdf5ReadingModule(name_in='read3',
                                 input_filename='PynPoint_database.hdf5',
                                 input_dir=self.test_dir + 'data',
                                 tag_dictionary={'test': 'test'})

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read3')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The dataset with tag name \'test\' is not found in ' \
                                             'the HDF5 file.'

        # The input file is unchanged: no 'test' dataset was created.
        with h5py.File(self.test_dir + 'data/PynPoint_database.hdf5', 'r') as hdf_file:
            assert set(hdf_file.keys()) == set(['extra', 'header_extra', 'header_images', 'images'])

    def test_no_input_filename(self) -> None:
        """With input_filename=None the module picks up the HDF5 file from input_dir."""

        read = Hdf5ReadingModule(name_in='read4',
                                 input_filename=None,
                                 input_dir=self.test_dir + 'data',
                                 tag_dictionary=None)

        self.pipeline.add_module(read)
        self.pipeline.run_module('read4')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)