class TestFilter:

    def setup_class(self) -> None:
        """Prepare a star data set, a configuration file, and a pipeline."""
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir + 'data')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the data that was created for the tests."""
        remove_test_data(self.test_dir, folders=['data'])

    def test_read_data(self) -> None:
        """Import the FITS images and verify the stored pixel values."""
        reading = FitsReadingModule(name_in='read',
                                    image_tag='data',
                                    input_dir=self.test_dir + 'data',
                                    overwrite=True,
                                    check=True)

        self.pipeline.add_module(reading)
        self.pipeline.run_module('read')

        images = self.pipeline.get_data('data')

        assert np.allclose(images[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(images), 0.00010029494781738066, rtol=limit, atol=0.)
        assert images.shape == (40, 100, 100)

    def test_gaussian_filter(self) -> None:
        """Smooth the images with a Gaussian kernel and verify the result."""
        smoothing = GaussianFilterModule(name_in='filter',
                                         image_in_tag='data',
                                         image_out_tag='filtered',
                                         fwhm=0.1)

        self.pipeline.add_module(smoothing)
        self.pipeline.run_module('filter')

        images = self.pipeline.get_data('filtered')

        assert np.allclose(images[0, 50, 50], 0.0388143943049942, rtol=limit, atol=0.)
        assert np.allclose(np.mean(images), 0.00010029494781738068, rtol=limit, atol=0.)
        assert images.shape == (40, 100, 100)
class TestFilter:

    def setup_class(self) -> None:
        """Set the test tolerance, create the test data, and build a pipeline."""
        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir + 'data')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the data that was created for the tests."""
        remove_test_data(self.test_dir, folders=['data'])

    def test_read_data(self) -> None:
        """Import the FITS images and verify the stored values."""
        reading = FitsReadingModule(name_in='read',
                                    image_tag='data',
                                    input_dir=self.test_dir + 'data',
                                    overwrite=True,
                                    check=True)

        self.pipeline.add_module(reading)
        self.pipeline.run_module('read')

        images = self.pipeline.get_data('data')

        assert np.sum(images) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert images.shape == (10, 11, 11)

    def test_gaussian_filter(self) -> None:
        """Apply the Gaussian filter and check that the flux is conserved."""
        smoothing = GaussianFilterModule(name_in='filter',
                                         image_in_tag='data',
                                         image_out_tag='filtered',
                                         fwhm=0.1)

        self.pipeline.add_module(smoothing)
        self.pipeline.run_module('filter')

        images = self.pipeline.get_data('filtered')

        assert np.sum(images) == pytest.approx(105.54278879805275, rel=self.limit, abs=0.)
        assert images.shape == (10, 11, 11)
def test_get_data_range(self) -> None:
    """Check that a data range slice returns only the requested frames."""
    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    subset = pipeline.get_data('images', data_range=(0, 2))
    assert subset.shape == (2, 11, 11)
def test_create_pipeline_existing_database(self) -> None:
    """Write an HDF5 database by hand and check that Pypeline reads it back."""
    np.random.seed(1)
    random_cube = np.random.normal(loc=0, scale=2e-4, size=(10, 100, 100))

    with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'w') as database:
        dataset = database.create_dataset('images', data=random_cube)
        dataset.attrs['PIXSCALE'] = 0.01

    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    # Full data set
    images = pipeline.get_data('images')
    assert np.allclose(images[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
    assert np.allclose(np.mean(images), 1.0506056979365338e-06, rtol=limit, atol=0.)
    assert images.shape == (10, 100, 100)

    # Sliced data set
    images = pipeline.get_data('images', data_range=(0, 5))
    assert np.allclose(images[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
    assert np.allclose(np.mean(images), 7.576979467771179e-07, rtol=limit, atol=0.)
    assert images.shape == (5, 100, 100)

    assert pipeline.get_attribute('images', 'PIXSCALE') == 0.01

    os.remove(self.test_dir + 'PynPoint_database.hdf5')
def test_create_pipeline_existing_database(self) -> None:
    """Write an HDF5 database by hand and check that Pypeline reads it back."""
    np.random.seed(1)
    random_cube = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11))

    with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'w') as database:
        dataset = database.create_dataset('images', data=random_cube)
        dataset.attrs['PIXSCALE'] = 0.01

    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    images = pipeline.get_data('images')
    assert np.mean(images) == pytest.approx(1.1824138000882435e-05, rel=self.limit, abs=0.)
    assert images.shape == (5, 11, 11)

    assert pipeline.get_attribute('images', 'PIXSCALE') == 0.01

    os.remove(self.test_dir+'PynPoint_database.hdf5')
def test_create_pipeline_existing_database(self):
    """Write an HDF5 database by hand and check that Pypeline reads it back.

    Fix: the database was opened with a manual open/close pair, so an
    exception between `h5py.File` and `h5f.close()` would leak the handle
    and keep the HDF5 file locked. A context manager guarantees the file
    is closed before the pipeline reopens it.
    """
    np.random.seed(1)
    images = np.random.normal(loc=0, scale=2e-4, size=(10, 100, 100))

    with h5py.File(self.test_dir + "PynPoint_database.hdf5", "w") as h5f:
        dset = h5f.create_dataset("images", data=images)
        dset.attrs['PIXSCALE'] = 0.01

    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    data = pipeline.get_data("images")
    assert np.allclose(data[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
    assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)

    assert pipeline.get_attribute("images", "PIXSCALE") == 0.01

    os.remove(self.test_dir + "PynPoint_database.hdf5")
class TestDetectionLimits(object):

    def setup_class(self):
        """Create a fake star data set, a configuration file, and a pipeline."""
        self.test_dir = os.path.dirname(__file__) + "/"

        create_star_data(path=self.test_dir + "limits")
        create_config(self.test_dir + "PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the data that was created for the tests."""
        remove_test_data(self.test_dir, folders=["limits"])

    def test_read_data(self):
        """Import the FITS images and verify the stored pixel values."""
        read = FitsReadingModule(name_in="read",
                                 image_tag="read",
                                 input_dir=self.test_dir + "limits")

        self.pipeline.add_module(read)
        self.pipeline.run_module("read")

        data = self.pipeline.get_data("read")
        assert np.allclose(data[0, 10, 10], 0.00012958496246258364, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_angle_interpolation(self):
        """Interpolate the parallactic angles and check the PARANG attribute."""
        angle = AngleInterpolationModule(name_in="angle",
                                         data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle")

        data = self.pipeline.get_attribute("read", "PARANG", static=False)
        assert data[5] == 2.7777777777777777
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_contrast_curve(self):
        """Compute the contrast curve with a single and with multiple processes."""
        proc = ["single", "multi"]

        for item in proc:
            if item == "multi":
                # Fix: the HDF5 handle was opened and never closed, which
                # leaked the handle and could keep the database locked while
                # the pipeline reopens it; a context manager releases it.
                with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as database:
                    database['config'].attrs['CPU'] = 4

            contrast = ContrastCurveModule(name_in="contrast_" + item,
                                           image_in_tag="read",
                                           psf_in_tag="read",
                                           contrast_out_tag="limits_" + item,
                                           separation=(0.5, 0.6, 0.1),
                                           angle=(0., 360., 180.),
                                           threshold=("sigma", 5.),
                                           psf_scaling=1.,
                                           aperture=0.1,
                                           pca_number=15,
                                           cent_size=None,
                                           edge_size=None,
                                           extra_rot=0.)

            self.pipeline.add_module(contrast)
            self.pipeline.run_module("contrast_" + item)

            data = self.pipeline.get_data("limits_" + item)
            assert np.allclose(data[0, 0], 5.00000000e-01, rtol=limit, atol=0.)
            assert np.allclose(data[0, 1], 2.3624384190310397, rtol=limit, atol=0.)
            assert np.allclose(data[0, 2], 0.05234065236317515, rtol=limit, atol=0.)
            assert np.allclose(data[0, 3], 0.00012147700290954244, rtol=limit, atol=0.)
            assert data.shape == (1, 4)
class TestHdf5WritingModule(object):

    def setup_class(self):
        """Create random test data, a configuration file, and a pipeline."""
        self.test_dir = os.path.dirname(__file__) + "/"

        create_random(self.test_dir)
        create_config(self.test_dir+"PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the data and the exported HDF5 file."""
        remove_test_data(self.test_dir, files=["test.hdf5"])

    def test_hdf5_writing(self):
        """Export the 'images' dataset to a new HDF5 file."""
        module = Hdf5WritingModule(file_name="test.hdf5",
                                   name_in="write1",
                                   output_dir=None,
                                   tag_dictionary={"images": "data1"},
                                   keep_attributes=True,
                                   overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module("write1")

    def test_no_data_tag(self):
        """Writing a non-existing tag should only raise a warning."""
        module = Hdf5WritingModule(file_name="test.hdf5",
                                   name_in="write2",
                                   output_dir=None,
                                   tag_dictionary={"empty": "empty"},
                                   keep_attributes=True,
                                   overwrite=False)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("write2")

        assert len(warning) == 1
        assert warning[0].message.args[0] == "No data under the tag which is linked by the " \
                                             "InputPort."

    def test_overwrite_false(self):
        """Append a second dataset without overwriting the HDF5 file."""
        module = Hdf5WritingModule(file_name="test.hdf5",
                                   name_in="write3",
                                   output_dir=None,
                                   tag_dictionary={"images": "data2"},
                                   keep_attributes=True,
                                   overwrite=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module("write3")

    def test_dictionary_none(self):
        """Run the writing module with the default (None) tag dictionary."""
        module = Hdf5WritingModule(file_name="test.hdf5",
                                   name_in="write4",
                                   output_dir=None,
                                   tag_dictionary=None,
                                   keep_attributes=True,
                                   overwrite=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module("write4")

    def test_hdf5_reading(self):
        """Read the exported file back and compare datasets and attributes."""
        module = Hdf5ReadingModule(name_in="read",
                                   input_filename="test.hdf5",
                                   input_dir=self.test_dir,
                                   tag_dictionary={"data1": "data1", "data2": "data2"})

        self.pipeline.add_module(module)
        self.pipeline.run_module("read")

        data1 = self.pipeline.get_data("data1")
        data2 = self.pipeline.get_data("data2")
        data3 = self.pipeline.get_data("images")

        # The round trip through the HDF5 file must preserve the values.
        assert np.allclose(data1, data2, rtol=limit, atol=0.)
        assert np.allclose(data2, data3, rtol=limit, atol=0.)

        attribute1 = self.pipeline.get_attribute("images", "PARANG", static=False)
        attribute2 = self.pipeline.get_attribute("data1", "PARANG", static=False)
        attribute3 = self.pipeline.get_attribute("data2", "PARANG", static=False)

        # The non-static attributes must be preserved as well.
        assert np.allclose(attribute1, attribute2, rtol=limit, atol=0.)
        assert np.allclose(attribute2, attribute3, rtol=limit, atol=0.)
class TestHdf5ReadingModule:

    def setup_class(self) -> None:
        """Create random test data, a configuration file, and a pipeline."""
        self.test_dir = os.path.dirname(__file__) + '/'

        create_random(self.test_dir + 'data')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the data that was created for the tests."""
        remove_test_data(self.test_dir, folders=['data'])

    def test_hdf5_reading(self) -> None:
        """Add extra datasets to the database and read a selected tag."""
        extra = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        with h5py.File(self.test_dir + 'data/PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file.create_dataset('extra', data=extra)
            hdf_file.create_dataset('header_extra/PARANG', data=[1., 2., 3., 4.])

        module = Hdf5ReadingModule(name_in='read1',
                                   input_filename='PynPoint_database.hdf5',
                                   input_dir=self.test_dir + 'data',
                                   tag_dictionary={'images': 'images'})

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        images = self.pipeline.get_data('images')
        assert np.allclose(images[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(images), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert images.shape == (10, 100, 100)

    def test_dictionary_none(self) -> None:
        """Reading with tag_dictionary=None should import all datasets."""
        module = Hdf5ReadingModule(name_in='read2',
                                   input_filename='PynPoint_database.hdf5',
                                   input_dir=self.test_dir + 'data',
                                   tag_dictionary=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        images = self.pipeline.get_data('images')
        assert np.allclose(images[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(images), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert images.shape == (10, 100, 100)

    def test_wrong_tag(self) -> None:
        """A tag that is absent from the file should only raise a warning."""
        module = Hdf5ReadingModule(name_in='read3',
                                   input_filename='PynPoint_database.hdf5',
                                   input_dir=self.test_dir + 'data',
                                   tag_dictionary={'test': 'test'})

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read3')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The dataset with tag name \'test\' is not found in ' \
                                             'the HDF5 file.'

        # The database content should be unchanged by the failed read.
        with h5py.File(self.test_dir + 'data/PynPoint_database.hdf5', 'r') as hdf_file:
            assert set(hdf_file.keys()) == {'extra', 'header_extra', 'header_images', 'images'}

    def test_no_input_filename(self) -> None:
        """Without a filename, the first HDF5 file in the folder is used."""
        module = Hdf5ReadingModule(name_in='read4',
                                   input_filename=None,
                                   input_dir=self.test_dir + 'data',
                                   tag_dictionary=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read4')

        images = self.pipeline.get_data('images')
        assert np.allclose(images[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(images), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert images.shape == (10, 100, 100)
class TestPsfSubtraction:

    def setup_class(self) -> None:
        """Create fake science and reference data sets and build a pipeline."""
        self.test_dir = os.path.dirname(__file__) + '/'

        # Science cube: 80 frames with an injected companion (sep=10, 3e-3).
        create_fake(path=self.test_dir + 'science',
                    ndit=[20, 20, 20, 20],
                    nframes=[20, 20, 20, 20],
                    exp_no=[1, 2, 3, 4],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=10.,
                    contrast=3e-3)

        # Reference cube: 40 frames without a companion.
        create_fake(path=self.test_dir + 'reference',
                    ndit=[10, 10, 10, 10],
                    nframes=[10, 10, 10, 10],
                    exp_no=[1, 2, 3, 4],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the data that was created for the tests."""
        remove_test_data(self.test_dir, folders=['science', 'reference'])

    def test_read_data(self) -> None:
        """Import the science and reference images and check the values."""
        module = FitsReadingModule(name_in='read1',
                                   image_tag='science',
                                   input_dir=self.test_dir + 'science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('science')
        assert np.allclose(data[0, 50, 50], 0.09798413502193708, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010063896953157961, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='reference',
                                   input_dir=self.test_dir + 'reference')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('reference')
        assert np.allclose(data[0, 50, 50], 0.09798413502193708, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_angle_interpolation(self) -> None:
        """Interpolate the parallactic angles of the science images."""
        module = AngleInterpolationModule(name_in='angle',
                                          data_tag='science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        data = self.pipeline.get_data('header_science/PARANG')
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 19.736842105263158, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

    def test_psf_preparation(self) -> None:
        """Mask the science and reference images for the PSF subtraction."""
        module = PSFpreparationModule(name_in='prep1',
                                      image_in_tag='science',
                                      image_out_tag='science_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.2,
                                      edge_size=1.0)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep1')

        data = self.pipeline.get_data('science_prep')
        assert np.allclose(data[0, 0, 0], 0.0, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0.0, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 4.534001223501053e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        module = PSFpreparationModule(name_in='prep2',
                                      image_in_tag='reference',
                                      image_out_tag='reference_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.2,
                                      edge_size=1.0)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep2')

        data = self.pipeline.get_data('reference_prep')
        assert np.allclose(data[0, 0, 0], 0.0, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0.0, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.227592050148539e-07, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_classical_adi(self) -> None:
        """Run classical ADI without a frame-selection threshold."""
        module = ClassicalADIModule(threshold=None,
                                    nreference=None,
                                    residuals='mean',
                                    extra_rot=0.,
                                    name_in='cadi1',
                                    image_in_tag='science',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi1')

        data = self.pipeline.get_data('cadi_res')
        assert np.allclose(np.mean(data), -6.359018260066029e-08, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('cadi_stack')
        assert np.allclose(np.mean(data), -8.318786331552922e-08, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_classical_adi_threshold(self) -> None:
        """Run classical ADI with a rotation threshold and median residuals."""
        module = ClassicalADIModule(threshold=(0.1, 0.03, 1.),
                                    nreference=5,
                                    residuals='median',
                                    extra_rot=0.,
                                    name_in='cadi2',
                                    image_in_tag='science',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi2')

        data = self.pipeline.get_data('cadi_res')
        assert np.allclose(np.mean(data), 1.6523183877608216e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('cadi_stack')
        assert np.allclose(np.mean(data), 1.413437242880268e-07, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_psf_subtraction_pca_single(self) -> None:
        """PCA PSF subtraction with a single process and all output tags."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_single',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_single',
                                         res_median_tag='res_median_single',
                                         res_weighted_tag='res_weighted_single',
                                         res_rot_mean_clip_tag='res_clip_single',
                                         res_arr_out_tag='res_arr_single',
                                         basis_out_tag='basis_single',
                                         extra_rot=-15.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_single')

        data = self.pipeline.get_data('res_mean_single')
        assert np.allclose(np.mean(data), 2.6959819771522928e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_median_single')
        assert np.allclose(np.mean(data), -2.4142571236920345e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_weighted_single')
        assert np.allclose(np.mean(data), -5.293559651636843e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_clip_single')
        assert np.allclose(np.mean(data), 2.6199554737979536e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_arr_single5')
        assert np.allclose(np.mean(data), 3.184676024912723e-08, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('basis_single')
        assert np.allclose(np.mean(data), -1.593245396350998e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_no_mean(self) -> None:
        """PCA PSF subtraction without subtracting the mean image."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean')

        data = self.pipeline.get_data('res_mean_no_mean')
        assert np.allclose(np.mean(data), 2.413203757426239e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_no_mean')
        assert np.allclose(np.mean(data), 7.4728664805632875e-06, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref(self) -> None:
        """PCA PSF subtraction with a separate reference data set."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref')

        data = self.pipeline.get_data('res_mean_ref')
        assert np.allclose(np.mean(data), 1.1662201512335965e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref')
        assert np.allclose(np.mean(data), -1.6780507257603104e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_no_mean(self) -> None:
        """PCA PSF subtraction with a reference set and no mean subtraction."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean')

        data = self.pipeline.get_data('res_mean_ref_no_mean')
        assert np.allclose(np.mean(data), 3.7029738044199534e-07, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_no_mean')
        assert np.allclose(np.mean(data), 2.3755682312090375e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_pca_single_mask(self) -> None:
        """PCA PSF subtraction on the masked (prepared) science images."""
        pca = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                      name_in='pca_single_mask',
                                      images_in_tag='science_prep',
                                      reference_in_tag='science_prep',
                                      res_mean_tag='res_mean_single_mask',
                                      res_median_tag='res_median_single_mask',
                                      res_weighted_tag='res_weighted_single_mask',
                                      res_rot_mean_clip_tag='res_clip_single_mask',
                                      res_arr_out_tag='res_arr_single_mask',
                                      basis_out_tag='basis_single_mask',
                                      extra_rot=-15.,
                                      subtract_mean=True)

        self.pipeline.add_module(pca)
        self.pipeline.run_module('pca_single_mask')

        data = self.pipeline.get_data('res_mean_single_mask')
        assert np.allclose(np.mean(data), -1.6536519510012155e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_median_single_mask')
        assert np.allclose(np.mean(data), 5.6094356668078245e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_weighted_single_mask')
        assert np.allclose(np.mean(data), 4.7079857263662695e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_clip_single_mask')
        assert np.allclose(np.mean(data), -4.875856901892831e-10, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_arr_single_mask5')
        assert np.allclose(np.mean(data), -1.700674890172441e-09, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('basis_single_mask')
        assert np.allclose(np.mean(data), 5.584100479595007e-06, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_no_mean_mask(self) -> None:
        """PCA on the masked images without subtracting the mean image."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_no_mean_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_no_mean_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean_mask',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean_mask')

        data = self.pipeline.get_data('res_mean_no_mean_mask')
        assert np.allclose(np.mean(data), -1.0905008724474168e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_no_mean_mask')
        assert np.allclose(np.sum(np.abs(data)), 1025.2018448288406, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_mask(self) -> None:
        """PCA on the masked images with the masked reference set."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='reference_prep',
                                         res_mean_tag='res_mean_ref_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_mask',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_mask')

        data = self.pipeline.get_data('res_mean_ref_mask')
        assert np.allclose(np.mean(data), -9.962692629500833e-10, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_mask')
        assert np.allclose(np.mean(data), -2.3165670099810983e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_no_mean_mask(self) -> None:
        """PCA on the masked images with a reference set, no mean subtraction."""
        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref_no_mean_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='reference_prep',
                                         res_mean_tag='res_mean_ref_no_mean_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_no_mean_mask',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean_mask')

        data = self.pipeline.get_data('res_mean_ref_no_mean_mask')
        assert np.allclose(np.mean(data), 3.848255803450399e-07, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_no_mean_mask')
        assert np.allclose(np.sum(np.abs(data)), 1026.3329224435665, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_pca_multi(self) -> None:
        """Multiprocessing PCA must reproduce the single-process results."""
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_multi',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_multi',
                                         res_median_tag='res_median_multi',
                                         res_weighted_tag='res_weighted_multi',
                                         res_rot_mean_clip_tag='res_clip_multi',
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_multi',
                                         extra_rot=-15.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi')

        data_single = self.pipeline.get_data('res_mean_single')
        data_multi = self.pipeline.get_data('res_mean_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_median_single')
        data_multi = self.pipeline.get_data('res_median_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_weighted_single')
        data_multi = self.pipeline.get_data('res_weighted_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('basis_single')
        data_multi = self.pipeline.get_data('basis_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-5, atol=0.)
        assert data_single.shape == data_multi.shape

    def test_psf_subtraction_pca_multi_mask(self) -> None:
        """Multiprocessing PCA on the masked images must match single-process."""
        # Fix: the HDF5 handle was opened without ever being closed, unlike
        # test_psf_subtraction_pca_multi which already used a context
        # manager; the leaked handle can keep the database locked while
        # the pipeline reopens it.
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as database:
            database['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_multi_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_multi_mask',
                                         res_median_tag='res_median_multi_mask',
                                         res_weighted_tag='res_weighted_multi_mask',
                                         res_rot_mean_clip_tag='res_clip_multi_mask',
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_multi_mask',
                                         extra_rot=-15.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi_mask')

        # Only compare pixels outside the mask (values above the threshold).
        data_single = self.pipeline.get_data('res_mean_single_mask')
        data_multi = self.pipeline.get_data('res_mean_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_median_single_mask')
        data_multi = self.pipeline.get_data('res_median_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_weighted_single_mask')
        data_multi = self.pipeline.get_data('res_weighted_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('basis_single_mask')
        data_multi = self.pipeline.get_data('basis_multi_mask')
        assert np.allclose(data_single, data_multi, rtol=1e-5, atol=0.)
        assert data_single.shape == data_multi.shape

    def test_psf_subtraction_len_parang(self) -> None:
        """A mismatch between images and angles must raise a ValueError."""
        # Fix: same unclosed-handle issue as in the multi-mask test.
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as database:
            database['config'].attrs['CPU'] = 1

        parang = self.pipeline.get_data('header_science/PARANG')
        self.pipeline.set_attribute('science_prep', 'PARANG',
                                    np.append(parang, 0.), static=False)

        module = PcaPsfSubtractionModule(pca_numbers=[5, ],
                                         name_in='pca_len_parang',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_len_parang',
                                         extra_rot=0.)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('pca_len_parang')

        assert str(error.value) == 'The number of images (80) is not equal to the number of ' \
                                   'parallactic angles (81).'
class TestLimits:

    def setup_class(self) -> None:
        """Set the test tolerance, create the star data, and build a pipeline.

        Fix: the data folder was named 'self.limits' — an accidental textual
        over-rename from the `limit` -> `self.limit` refactor (compare the
        'limits' folder used elsewhere in these tests). Restored to 'limits'
        here, in teardown_class, and in test_read_data.
        """
        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir+'limits', npix=21, pos_star=10.)
        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the test data and the downloaded atmosphere model."""
        remove_test_data(path=self.test_dir,
                         folders=['limits'],
                         files=['model.AMES-Cond-2000.M-0.0.NaCo.Vega'])

    def test_read_data(self) -> None:
        """Import the FITS images and verify the stored values."""
        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir+'limits')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_angle_interpolation(self) -> None:
        """Interpolate the parallactic angles and check the PARANG attribute."""
        module = AngleInterpolationModule(name_in='angle',
                                          data_tag='read')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        attr = self.pipeline.get_attribute('read', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(900., rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_contrast_curve(self) -> None:
        """Compute the contrast curve with a single and multiple processes."""
        proc = ['single', 'multi']

        for item in proc:
            if item == 'multi':
                with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
                    hdf_file['config'].attrs['CPU'] = 4

            module = ContrastCurveModule(name_in='contrast_'+item,
                                         image_in_tag='read',
                                         psf_in_tag='read',
                                         contrast_out_tag='limits_'+item,
                                         separation=(0.2, 0.3, 0.2),
                                         angle=(0., 360., 180.),
                                         threshold=('sigma', 5.),
                                         psf_scaling=1.,
                                         aperture=0.05,
                                         pca_number=2,
                                         cent_size=None,
                                         edge_size=1.,
                                         extra_rot=0.)

            self.pipeline.add_module(module)
            self.pipeline.run_module('contrast_'+item)

            data = self.pipeline.get_data('limits_'+item)
            assert data[0, 0] == pytest.approx(0.2, rel=self.limit, abs=0.)
            assert data[0, 1] == pytest.approx(2.5223717329932676, rel=self.limit, abs=0.)
            assert data[0, 2] == pytest.approx(0.0006250749411563979, rel=self.limit, abs=0.)
            assert data[0, 3] == pytest.approx(0.00026866680137822624, rel=self.limit, abs=0.)
            assert data.shape == (1, 4)

    def test_contrast_curve_fpf(self) -> None:
        """Compute the contrast curve with a false-positive-fraction threshold."""
        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = ContrastCurveModule(name_in='contrast_fpf',
                                     image_in_tag='read',
                                     psf_in_tag='read',
                                     contrast_out_tag='limits_fpf',
                                     separation=(0.2, 0.3, 0.2),
                                     angle=(0., 360., 180.),
                                     threshold=('fpf', 1e-6),
                                     psf_scaling=1.,
                                     aperture=0.05,
                                     pca_number=2,
                                     cent_size=None,
                                     edge_size=1.,
                                     extra_rot=0.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('contrast_fpf')

        data = self.pipeline.get_data('limits_fpf')
        assert data[0, 0] == pytest.approx(0.2, rel=self.limit, abs=0.)
        assert data[0, 1] == pytest.approx(1.797063014325614, rel=self.limit, abs=0.)
        assert data[0, 2] == pytest.approx(0.0006250749411564145, rel=self.limit, abs=0.)
        assert data[0, 3] == pytest.approx(1e-06, rel=self.limit, abs=0.)
        assert data.shape == (1, 4)

    def test_mass_limits(self) -> None:
        """Convert synthetic contrast limits into mass limits with a model grid."""
        separation = np.linspace(0.1, 1.0, 10)
        contrast = -2.5*np.log10(1e-4/separation)
        variance = 0.1*contrast

        limits = np.zeros((10, 4))
        limits[:, 0] = separation
        limits[:, 1] = contrast
        limits[:, 2] = variance

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['contrast_limits'] = limits

        url = 'https://home.strw.leidenuniv.nl/~stolker/pynpoint/' \
              'model.AMES-Cond-2000.M-0.0.NaCo.Vega'

        filename = self.test_dir + 'model.AMES-Cond-2000.M-0.0.NaCo.Vega'

        urlretrieve(url, filename)

        module = MassLimitsModule(model_file=filename,
                                  star_prop={'magnitude': 10.,
                                             'distance': 100.,
                                             'age': 20.},
                                  name_in='mass',
                                  contrast_in_tag='contrast_limits',
                                  mass_out_tag='mass_limits',
                                  instr_filter='L\'')

        self.pipeline.add_module(module)
        self.pipeline.run_module('mass')

        data = self.pipeline.get_data('mass_limits')
        assert np.mean(data[:, 0]) == pytest.approx(0.55, rel=self.limit, abs=0.)
        assert np.mean(data[:, 1]) == pytest.approx(0.001891690765603738, rel=self.limit, abs=0.)
        assert np.mean(data[:, 2]) == pytest.approx(0.000964309686441908, rel=self.limit, abs=0.)
        assert np.mean(data[:, 3]) == pytest.approx(-0.000696402843279597, rel=self.limit, abs=0.)
        assert data.shape == (10, 4)
class TestPsfSubtractionAdi:
    """Regression tests for PSF subtraction (classical ADI and PCA-based)
    on a small synthetic ADI data set, including multiprocessing and
    reference-library variants."""

    def setup_class(self) -> None:
        """Create synthetic science/reference data and a pipeline instance."""

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_fake_data(self.test_dir + 'science')
        create_fake_data(self.test_dir + 'reference')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the data created by :meth:`setup_class`."""

        remove_test_data(self.test_dir, folders=['science', 'reference'])

    def test_read_data(self) -> None:
        """Import the science and reference cubes and attach parallactic angles."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='science',
                                   input_dir=self.test_dir + 'science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('science')
        assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        self.pipeline.set_attribute('science', 'PARANG', np.linspace(0., 180., 10), static=False)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='reference',
                                   input_dir=self.test_dir + 'reference')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('reference')
        assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_psf_preparation(self) -> None:
        """Mask the central/outer regions of both cubes for the masked tests."""

        module = PSFpreparationModule(name_in='prep1',
                                      image_in_tag='science',
                                      image_out_tag='science_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.05,
                                      edge_size=1.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep1')

        data = self.pipeline.get_data('science_prep')
        assert np.sum(data) == pytest.approx(5.029285028467547, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        module = PSFpreparationModule(name_in='prep2',
                                      image_in_tag='reference',
                                      image_out_tag='reference_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.05,
                                      edge_size=1.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep2')

        data = self.pipeline.get_data('reference_prep')
        assert np.sum(data) == pytest.approx(5.029285028467547, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_classical_adi(self) -> None:
        """Classical ADI with the full image stack as reference."""

        module = ClassicalADIModule(threshold=None,
                                    nreference=None,
                                    residuals='mean',
                                    extra_rot=0.,
                                    name_in='cadi1',
                                    image_in_tag='science_prep',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi1')

        data = self.pipeline.get_data('cadi_res')
        assert np.sum(data) == pytest.approx(0.8381625719865213, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        data = self.pipeline.get_data('cadi_stack')
        assert np.sum(data) == pytest.approx(0.08395606034388256, rel=self.limit, abs=0.)
        assert data.shape == (1, 21, 21)

    def test_classical_adi_threshold(self) -> None:
        """Classical ADI with a rotation threshold and limited reference frames."""

        module = ClassicalADIModule(threshold=(0.1, 0.03, 1.),
                                    nreference=5,
                                    residuals='median',
                                    extra_rot=0.,
                                    name_in='cadi2',
                                    image_in_tag='science_prep',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi2')

        data = self.pipeline.get_data('cadi_res')
        assert np.sum(data) == pytest.approx(0.7158207863548083, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        data = self.pipeline.get_data('cadi_stack')
        assert np.sum(data) == pytest.approx(0.07448334552227256, rel=self.limit, abs=0.)
        assert data.shape == (1, 21, 21)

    def test_psf_subtraction_pca_single(self) -> None:
        """PCA PSF subtraction, single process, all output ports enabled."""

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 3),
            name_in='pca_single',
            images_in_tag='science',
            reference_in_tag='science',
            res_mean_tag='res_mean_single',
            res_median_tag='res_median_single',
            res_weighted_tag='res_weighted_single',
            res_rot_mean_clip_tag='res_clip_single',
            res_arr_out_tag='res_arr_single',
            basis_out_tag='basis_single',
            extra_rot=45.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_single')

        data = self.pipeline.get_data('res_mean_single')
        assert np.sum(data) == pytest.approx(-0.00011857022709778602, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_median_single')
        assert np.sum(data) == pytest.approx(-0.002184868916566093, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_weighted_single')
        assert np.sum(data) == pytest.approx(0.08102176735226937, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        # data = self.pipeline.get_data('res_clip_single')
        # assert np.sum(data) == pytest.approx(7.09495495339349e-05, rel=self.limit, abs=0.)
        # assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_arr_single1')
        assert np.sum(data) == pytest.approx(-0.0002751385418691618, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        data = self.pipeline.get_data('basis_single')
        assert np.sum(data) == pytest.approx(0.09438697731322143, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_no_mean(self) -> None:
        """PCA PSF subtraction without subtracting the mean image."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean')

        data = self.pipeline.get_data('res_mean_no_mean')
        assert np.sum(data) == pytest.approx(0.0006081272007585688, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_no_mean')
        assert np.sum(data) == pytest.approx(5.118005177367776, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_ref(self) -> None:
        """PCA PSF subtraction with a separate reference library."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_ref',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref')

        data = self.pipeline.get_data('res_mean_ref')
        assert np.sum(data) == pytest.approx(0.0006330226118859073, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_ref')
        assert np.sum(data) == pytest.approx(0.0943869773132221, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_ref_no_mean(self) -> None:
        """Reference library variant without mean subtraction."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_ref_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean')

        data = self.pipeline.get_data('res_mean_ref_no_mean')
        assert np.sum(data) == pytest.approx(0.0006081272007585764, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_ref_no_mean')
        assert np.sum(data) == pytest.approx(5.118005177367774, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_pca_single_mask(self) -> None:
        """PCA PSF subtraction on the masked cube, all output ports enabled."""

        pca = PcaPsfSubtractionModule(
            pca_numbers=range(1, 3),
            name_in='pca_single_mask',
            images_in_tag='science_prep',
            reference_in_tag='science_prep',
            res_mean_tag='res_mean_single_mask',
            res_median_tag='res_median_single_mask',
            res_weighted_tag='res_weighted_single_mask',
            res_rot_mean_clip_tag='res_clip_single_mask',
            res_arr_out_tag='res_arr_single_mask',
            basis_out_tag='basis_single_mask',
            extra_rot=45.,
            subtract_mean=True)

        self.pipeline.add_module(pca)
        self.pipeline.run_module('pca_single_mask')

        data = self.pipeline.get_data('res_mean_single_mask')
        assert np.sum(data) == pytest.approx(0.00010696166038626307, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_median_single_mask')
        assert np.sum(data) == pytest.approx(-0.0021005307611346156, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_weighted_single_mask')
        assert np.sum(data) == pytest.approx(0.06014309988789256, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_clip_single_mask')
        # assert np.sum(data) == pytest.approx(9.35120662148806e-05, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_arr_single_mask1')
        assert np.sum(data) == pytest.approx(0.0006170872862547557, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        data = self.pipeline.get_data('basis_single_mask')
        assert np.sum(data) == pytest.approx(0.08411251293842359, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_no_mean_mask(self) -> None:
        """Masked cube without mean subtraction."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_no_mean_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_no_mean_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean_mask',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean_mask')

        data = self.pipeline.get_data('res_mean_no_mean_mask')
        assert np.sum(data) == pytest.approx(2.3542359949502915e-05, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_no_mean_mask')
        assert np.sum(data) == pytest.approx(5.655460951633232, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_ref_mask(self) -> None:
        """Masked cube with a separate (masked) reference library."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_ref_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='reference_prep',
                                         res_mean_tag='res_mean_ref_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_mask',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_mask')

        data = self.pipeline.get_data('res_mean_ref_mask')
        assert np.sum(data) == pytest.approx(9.400558926815758e-06, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_ref_mask')
        assert np.sum(data) == pytest.approx(0.08411251293842326, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_ref_no_mean_mask(self) -> None:
        """Masked reference library without mean subtraction."""

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 3),
            name_in='pca_ref_no_mean_mask',
            images_in_tag='science_prep',
            reference_in_tag='reference_prep',
            res_mean_tag='res_mean_ref_no_mean_mask',
            res_median_tag=None,
            res_weighted_tag=None,
            res_rot_mean_clip_tag=None,
            res_arr_out_tag=None,
            basis_out_tag='basis_ref_no_mean_mask',
            extra_rot=0.,
            subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean_mask')

        data = self.pipeline.get_data('res_mean_ref_no_mean_mask')
        assert np.sum(data) == pytest.approx(2.354235994950671e-05, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_ref_no_mean_mask')
        assert np.sum(data) == pytest.approx(5.655460951633233, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_pca_multi(self) -> None:
        """Multiprocessing run must reproduce the single-process results."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 3),
            name_in='pca_multi',
            images_in_tag='science',
            reference_in_tag='science',
            res_mean_tag='res_mean_multi',
            res_median_tag='res_median_multi',
            res_weighted_tag='res_weighted_multi',
            res_rot_mean_clip_tag='res_clip_multi',
            res_arr_out_tag=None,
            basis_out_tag='basis_multi',
            extra_rot=45.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi')

        data_single = self.pipeline.get_data('res_mean_single')
        data_multi = self.pipeline.get_data('res_mean_multi')
        assert data_single.shape == data_multi.shape
        assert data_single[data_single > 1e-12] == \
            pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.)

        data_single = self.pipeline.get_data('res_median_single')
        data_multi = self.pipeline.get_data('res_median_multi')
        assert data_single.shape == data_multi.shape
        assert data_single[data_single > 1e-12] == \
            pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.)

        data_single = self.pipeline.get_data('res_weighted_single')
        data_multi = self.pipeline.get_data('res_weighted_multi')
        assert data_single.shape == data_multi.shape
        assert data_single[data_single > 1e-12] == \
            pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.)

        data_single = self.pipeline.get_data('basis_single')
        data_multi = self.pipeline.get_data('basis_multi')
        assert data_single.shape == data_multi.shape
        assert data_single[data_single > 1e-12] == \
            pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.)

    def test_psf_subtraction_pca_multi_mask(self) -> None:
        """Multiprocessing run on the masked cube must match single-process."""

        # Use a context manager (the original left the file handle open)
        # so the database is released before the pipeline reopens it.
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 3),
            name_in='pca_multi_mask',
            images_in_tag='science_prep',
            reference_in_tag='science_prep',
            res_mean_tag='res_mean_multi_mask',
            res_median_tag='res_median_multi_mask',
            res_weighted_tag='res_weighted_multi_mask',
            res_rot_mean_clip_tag='res_clip_multi_mask',
            res_arr_out_tag=None,
            basis_out_tag='basis_multi_mask',
            extra_rot=45.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi_mask')

        data_single = self.pipeline.get_data('res_mean_single_mask')
        data_multi = self.pipeline.get_data('res_mean_multi_mask')
        assert data_single.shape == data_multi.shape
        assert data_single[data_single > 1e-12] == \
            pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.)

        data_single = self.pipeline.get_data('res_median_single_mask')
        data_multi = self.pipeline.get_data('res_median_multi_mask')
        assert data_single.shape == data_multi.shape
        assert data_single[data_single > 1e-12] == \
            pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.)

        data_single = self.pipeline.get_data('res_weighted_single_mask')
        data_multi = self.pipeline.get_data('res_weighted_multi_mask')
        assert data_single.shape == data_multi.shape
        assert data_single[data_single > 1e-12] == \
            pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.)

        data_single = self.pipeline.get_data('basis_single_mask')
        data_multi = self.pipeline.get_data('basis_multi_mask')
        assert data_single.shape == data_multi.shape
        assert data_single == pytest.approx(data_multi, rel=self.limit, abs=0.)

    def test_psf_subtraction_len_parang(self) -> None:
        """A mismatch between image count and PARANG length must raise."""

        # Use a context manager (the original left the file handle open).
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        # Append one extra angle so there are 11 angles for 10 images.
        parang = self.pipeline.get_data('header_science/PARANG')
        self.pipeline.set_attribute('science_prep', 'PARANG', np.append(parang, 0.), static=False)

        module = PcaPsfSubtractionModule(pca_numbers=[1, ],
                                         name_in='pca_len_parang',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_len_parang',
                                         extra_rot=0.)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('pca_len_parang')

        assert str(error.value) == 'The number of images (10) is not equal to the number of ' \
                                   'parallactic angles (11).'
class TestFluxPosition:
    """Regression tests for flux and position measurement modules:
    aperture photometry, fake-planet injection, false-positive statistics,
    simplex minimization, MCMC sampling, and systematic-error estimation."""

    def setup_class(self) -> None:
        """Create synthetic ADI, PSF, and reference data and a pipeline."""

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_fake_data(self.test_dir + 'adi')
        create_star_data(self.test_dir + 'psf', npix=21, pos_star=10.)
        create_star_data(self.test_dir + 'ref', npix=21, pos_star=10.)
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the data created by :meth:`setup_class`."""

        remove_test_data(self.test_dir, folders=['adi', 'psf', 'ref'])

    def test_read_data(self) -> None:
        """Import the ADI, PSF, and reference cubes and attach PARANG."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='adi',
                                   input_dir=self.test_dir + 'adi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('adi')
        assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        self.pipeline.set_attribute('adi', 'PARANG', np.linspace(0., 180., 10), static=False)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='psf',
                                   input_dir=self.test_dir + 'psf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('psf')
        assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        # NOTE(review): the 'ref' tag is read from the 'psf' directory, not
        # from 'ref'; both folders are created with identical parameters so
        # the checksum matches either way — confirm this is intentional.
        module = FitsReadingModule(name_in='read3',
                                   image_tag='ref',
                                   input_dir=self.test_dir + 'psf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read3')

        data = self.pipeline.get_data('ref')
        assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_aperture_photometry(self) -> None:
        """Aperture photometry at the image center; single- and
        multiprocessing runs must agree."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = AperturePhotometryModule(name_in='photometry1',
                                          image_in_tag='psf',
                                          phot_out_tag='photometry1',
                                          radius=0.1,
                                          position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry1')

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = AperturePhotometryModule(name_in='photometry2',
                                          image_in_tag='psf',
                                          phot_out_tag='photometry2',
                                          radius=0.1,
                                          position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry2')

        data = self.pipeline.get_data('photometry1')
        assert np.sum(data) == pytest.approx(100.80648929590365, rel=self.limit, abs=0.)
        assert data.shape == (10, 1)

        data_multi = self.pipeline.get_data('photometry2')
        assert data.shape == data_multi.shape
        assert data == pytest.approx(data_multi, rel=self.limit, abs=0.)

    def test_aperture_photometry_position(self) -> None:
        """An explicit aperture position at the star must give the same flux."""

        module = AperturePhotometryModule(name_in='photometry3',
                                          image_in_tag='psf',
                                          phot_out_tag='photometry3',
                                          radius=0.1,
                                          position=(10., 10.))

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry3')

        data = self.pipeline.get_data('photometry3')
        assert np.sum(data) == pytest.approx(100.80648929590365, rel=self.limit, abs=0.)
        assert data.shape == (10, 1)

    def test_fake_planet(self) -> None:
        """Inject an artificial planet at 0.2 arcsec, 180 deg."""

        module = FakePlanetModule(position=(0.2, 180.),
                                  magnitude=2.5,
                                  psf_scaling=1.,
                                  interpolation='spline',
                                  name_in='fake',
                                  image_in_tag='adi',
                                  psf_in_tag='psf',
                                  image_out_tag='fake')

        self.pipeline.add_module(module)
        self.pipeline.run_module('fake')

        data = self.pipeline.get_data('fake')
        assert np.sum(data) == pytest.approx(21.51956021269913, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_psf_subtraction(self) -> None:
        """PCA PSF subtraction on the cube with the injected planet."""

        module = PcaPsfSubtractionModule(pca_numbers=[1, ],
                                         name_in='pca',
                                         images_in_tag='fake',
                                         reference_in_tag='fake',
                                         res_mean_tag='res_mean',
                                         extra_rot=0.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca')

        data = self.pipeline.get_data('res_mean')
        assert np.sum(data) == pytest.approx(0.014757351752469366, rel=self.limit, abs=0.)
        assert data.shape == (1, 21, 21)

    def test_false_positive(self) -> None:
        """SNR and FPF at a fixed position (no optimization)."""

        module = FalsePositiveModule(position=(10., 2.),
                                     aperture=0.06,
                                     ignore=True,
                                     name_in='false1',
                                     image_in_tag='res_mean',
                                     snr_out_tag='snr_fpf1',
                                     optimize=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('false1')

        data = self.pipeline.get_data('snr_fpf1')
        # Columns 1-5: x/y offset context, separation, position angle, SNR, FPF.
        assert data[0, 1] == pytest.approx(2., rel=self.limit, abs=0.)
        assert data[0, 2] == pytest.approx(0.216, rel=self.limit, abs=0.)
        assert data[0, 3] == pytest.approx(180., rel=self.limit, abs=0.)
        assert data[0, 4] == pytest.approx(23.555448981008507, rel=self.limit, abs=0.)
        assert data[0, 5] == pytest.approx(3.1561982060476726e-08, rel=self.limit, abs=0.)
        assert data.shape == (1, 6)

    def test_false_positive_optimize(self) -> None:
        """SNR and FPF with position optimization enabled."""

        module = FalsePositiveModule(position=(10., 2.),
                                     aperture=0.06,
                                     ignore=True,
                                     name_in='false2',
                                     image_in_tag='res_mean',
                                     snr_out_tag='snr_fpf2',
                                     optimize=True,
                                     offset=0.1,
                                     tolerance=0.01)

        self.pipeline.add_module(module)
        self.pipeline.run_module('false2')

        data = self.pipeline.get_data('snr_fpf2')
        assert data[0, 1] == pytest.approx(2.0681640624999993, rel=self.limit, abs=0.)
        assert data[0, 2] == pytest.approx(0.21416845852767494, rel=self.limit, abs=0.)
        assert data[0, 3] == pytest.approx(179.47800221910444, rel=self.limit, abs=0.)
        assert data[0, 4] == pytest.approx(24.254455766076823, rel=self.limit, abs=0.)
        assert data[0, 5] == pytest.approx(2.5776271254831863e-08, rel=self.limit, abs=0.)
        assert data.shape == (1, 6)

    def test_simplex_minimization_hessian(self) -> None:
        """Retrieve the injected planet by simplex minimization of the
        Hessian merit function."""

        module = SimplexMinimizationModule(name_in='simplex1',
                                           image_in_tag='fake',
                                           psf_in_tag='psf',
                                           res_out_tag='simplex_res',
                                           flux_position_tag='flux_position',
                                           position=(10., 3.),
                                           magnitude=2.5,
                                           psf_scaling=-1.,
                                           merit='hessian',
                                           aperture=0.06,
                                           sigma=0.,
                                           tolerance=0.1,
                                           pca_number=1,
                                           cent_size=0.06,
                                           edge_size=None,
                                           extra_rot=0.,
                                           reference_in_tag=None,
                                           residuals='median',
                                           offset=1.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('simplex1')

        data = self.pipeline.get_data('simplex_res')
        assert np.sum(data) == pytest.approx(0.07079158286664607, rel=self.limit, abs=0.)
        assert data.shape == (25, 21, 21)

        # Last row holds the converged position, separation, angle, and magnitude.
        data = self.pipeline.get_data('flux_position')
        assert data[24, 0] == pytest.approx(9.933213305898484, rel=self.limit, abs=0.)
        assert data[24, 1] == pytest.approx(2.637268518518516, rel=self.limit, abs=0.)
        assert data[24, 2] == pytest.approx(0.198801928351391, rel=self.limit, abs=0.)
        assert data[24, 3] == pytest.approx(179.48028924294857, rel=self.limit, abs=0.)
        assert data[24, 4] == pytest.approx(2.4782450274348378, rel=self.limit, abs=0.)
        assert data.shape == (25, 6)

    def test_simplex_minimization_reference(self) -> None:
        """Simplex minimization with a reference library and Poisson merit."""

        module = SimplexMinimizationModule(
            name_in='simplex2',
            image_in_tag='fake',
            psf_in_tag='psf',
            res_out_tag='simplex_res_ref',
            flux_position_tag='flux_position_ref',
            position=(10., 3.),
            magnitude=2.5,
            psf_scaling=-1.,
            merit='poisson',
            aperture=0.06,
            sigma=0.,
            tolerance=0.1,
            pca_number=1,
            cent_size=0.06,
            edge_size=None,
            extra_rot=0.,
            reference_in_tag='ref',
            residuals='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simplex2')

        data = self.pipeline.get_data('simplex_res_ref')
        assert np.sum(data) == pytest.approx(9.914746160040783, rel=self.limit, abs=0.)
        assert data.shape == (28, 21, 21)

        data = self.pipeline.get_data('flux_position_ref')
        assert data[27, 0] == pytest.approx(10.049019964116436, rel=self.limit, abs=0.)
        assert data[27, 1] == pytest.approx(2.6444836362361936, rel=self.limit, abs=0.)
        assert data[27, 2] == pytest.approx(0.19860335205689572, rel=self.limit, abs=0.)
        assert data[27, 3] == pytest.approx(180.38183525629643, rel=self.limit, abs=0.)
        assert data[27, 4] == pytest.approx(2.5496922175196, rel=self.limit, abs=0.)
        assert data.shape == (28, 6)

    def test_mcmc_sampling(self) -> None:
        """Smoke test: MCMC sampling runs to completion (no output checks
        beyond the stacked PSF used as input)."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        # Stack the PSF cube into a single mean image for the forward model.
        module = DerotateAndStackModule(name_in='stack',
                                        image_in_tag='psf',
                                        image_out_tag='psf_stack',
                                        derotate=False,
                                        stack='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('stack')

        data = self.pipeline.get_data('psf_stack')
        assert np.sum(data) == pytest.approx(10.843655133957288, rel=self.limit, abs=0.)
        assert data.shape == (1, 21, 21)

        # Tiny chain (6 walkers x 5 steps) keeps the runtime short; the
        # sampled values themselves are not asserted.
        module = MCMCsamplingModule(name_in='mcmc',
                                    image_in_tag='adi',
                                    psf_in_tag='psf_stack',
                                    chain_out_tag='mcmc',
                                    param=(0.15, 0., 1.),
                                    bounds=((0.1, 0.2), (-2., 2.), (-1., 2.)),
                                    nwalkers=6,
                                    nsteps=5,
                                    psf_scaling=-1.,
                                    pca_number=1,
                                    aperture=(10, 16, 0.06),
                                    mask=None,
                                    extra_rot=0.,
                                    merit='gaussian',
                                    residuals='median',
                                    sigma=(1e-3, 1e-1, 1e-2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('mcmc')

    def test_systematic_error(self) -> None:
        """Estimate the systematic offsets from injecting and retrieving
        fake planets at two position angles."""

        module = SystematicErrorModule(name_in='error',
                                       image_in_tag='adi',
                                       psf_in_tag='psf',
                                       offset_out_tag='offset',
                                       position=(0.162, 0.),
                                       magnitude=5.,
                                       angles=(0., 180., 2),
                                       psf_scaling=1.,
                                       merit='gaussian',
                                       aperture=0.06,
                                       tolerance=0.1,
                                       pca_number=1,
                                       mask=(None, None),
                                       extra_rot=0.,
                                       residuals='median',
                                       offset=1.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('error')

        data = self.pipeline.get_data('offset')
        assert data[0, 0] == pytest.approx(-0.0028749671933526733, rel=self.limit, abs=0.)
        assert data[0, 1] == pytest.approx(0.2786088210998514, rel=self.limit, abs=0.)
        assert data[0, 2] == pytest.approx(-0.02916297162565762, rel=self.limit, abs=0.)
        assert data[0, 3] == pytest.approx(-0.02969350583704866, rel=self.limit, abs=0.)
        assert data[0, 4] == pytest.approx(-0.10640807184499579, rel=self.limit, abs=0.)
        assert data.shape == (2, 5)
class TestProcessing:
    """Tests for the ProcessingModule machinery: output-port bookkeeping,
    apply_function_to_images in its memory/CPU variants, and multiprocessing
    consistency on a large data set."""

    def setup_class(self) -> None:
        """Create random image stacks directly in the HDF5 database and a
        pipeline instance."""

        # Consistency fix: define the tolerance on the instance (the other
        # test classes in this file use self.limit with pytest.approx).
        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        np.random.seed(1)

        images = np.random.normal(loc=0, scale=2e-4, size=(100, 10, 10))
        large_data = np.random.normal(loc=0, scale=2e-4, size=(10000, 100, 100))

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'w') as hdf_file:
            hdf_file.create_dataset('images', data=images)
            hdf_file.create_dataset('large_data', data=large_data)

        create_star_data(path=self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

        self.pipeline.set_attribute('images', 'PIXSCALE', 0.1, static=True)
        self.pipeline.set_attribute('large_data', 'PIXSCALE', 0.1, static=True)

    def teardown_class(self) -> None:
        """Remove the data created by :meth:`setup_class`."""

        remove_test_data(self.test_dir, folders=['images'])

    def test_output_port_name(self) -> None:
        """Registering the same output-port tag twice must warn."""

        module = FitsReadingModule(name_in='read',
                                   image_tag='images',
                                   input_dir=self.test_dir + 'images')

        module.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            module.add_output_port('test')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Tag \'test\' of ReadingModule \'read\' is already ' \
                                             'used.'

        module = BadPixelSigmaFilterModule(name_in='badpixel',
                                           image_in_tag='images',
                                           image_out_tag='im_out')

        module.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            module.add_output_port('test')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Tag \'test\' of ProcessingModule \'badpixel\' is ' \
                                             'already used.'

        self.pipeline.m_data_storage.close_connection()

    def test_apply_function(self) -> None:
        """Multiprocessing line subtraction with limited memory."""

        self.pipeline.set_attribute('config', 'MEMORY', 20, static=True)
        self.pipeline.set_attribute('config', 'CPU', 4, static=True)

        module = LineSubtractionModule(name_in='subtract',
                                       image_in_tag='images',
                                       image_out_tag='im_subtract',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract')

        data = self.pipeline.get_data('images')
        assert np.mean(data) == pytest.approx(1.9545313398209947e-06, rel=self.limit, abs=0.)
        assert data.shape == (100, 10, 10)

        # The mean of the subtracted stack is numerically zero.
        data = self.pipeline.get_data('im_subtract')
        assert np.mean(data) == pytest.approx(5.529431079676073e-22, rel=self.limit, abs=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_args_none(self) -> None:
        """apply_function with no extra arguments."""

        module = TimeNormalizationModule(name_in='norm',
                                         image_in_tag='images',
                                         image_out_tag='im_norm')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm')

        data = self.pipeline.get_data('im_norm')
        assert np.mean(data) == pytest.approx(-3.3117684144801347e-07, rel=self.limit, abs=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_args_none_memory_none(self) -> None:
        """Same as above but with MEMORY=0, i.e. the full stack in memory."""

        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        module = TimeNormalizationModule(name_in='norm_none',
                                         image_in_tag='images',
                                         image_out_tag='im_norm')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm_none')

        data = self.pipeline.get_data('im_norm')
        assert np.mean(data) == pytest.approx(-3.3117684144801347e-07, rel=self.limit, abs=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_same_port(self) -> None:
        """In-place processing: input and output tag are identical."""

        module = LineSubtractionModule(name_in='subtract_same',
                                       image_in_tag='im_subtract',
                                       image_out_tag='im_subtract',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract_same')

        data = self.pipeline.get_data('im_subtract')
        assert np.mean(data) == pytest.approx(7.318364664277155e-22, rel=self.limit, abs=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_args_none_memory_none_same_port(self) -> None:
        """In-place processing with MEMORY=0 and no extra arguments."""

        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        data = self.pipeline.get_data('images')
        assert np.mean(data) == pytest.approx(1.9545313398209947e-06, rel=self.limit, abs=0.)
        assert data.shape == (100, 10, 10)

        module = TimeNormalizationModule(name_in='norm_none_same',
                                         image_in_tag='images',
                                         image_out_tag='images')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm_none_same')

        data = self.pipeline.get_data('images')
        assert np.mean(data) == pytest.approx(-3.3117684144801347e-07, rel=self.limit, abs=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_to_images_memory_none(self) -> None:
        """apply_function_to_images with the full stack in memory."""

        module = StarExtractionModule(name_in='extract',
                                      image_in_tag='im_subtract',
                                      image_out_tag='extract',
                                      index_out_tag=None,
                                      image_size=0.5,
                                      fwhm_star=0.1,
                                      position=(None, None, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract')

        data = self.pipeline.get_data('extract')
        assert np.mean(data) == pytest.approx(1.5591859111937413e-07, rel=self.limit, abs=0.)
        assert data.shape == (100, 5, 5)

    def test_multiproc_large_data(self) -> None:
        """Single- and multiprocessing runs on a large stack must agree."""

        self.pipeline.set_attribute('config', 'MEMORY', 1000, static=True)
        self.pipeline.set_attribute('config', 'CPU', 1, static=True)

        module = LineSubtractionModule(name_in='subtract_single',
                                       image_in_tag='large_data',
                                       image_out_tag='im_sub_single',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract_single')

        self.pipeline.set_attribute('config', 'CPU', 4, static=True)

        module = LineSubtractionModule(name_in='subtract_multi',
                                       image_in_tag='large_data',
                                       image_out_tag='im_sub_multi',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract_multi')

        data_single = self.pipeline.get_data('im_sub_single')
        data_multi = self.pipeline.get_data('im_sub_multi')
        assert data_single == pytest.approx(data_multi, rel=self.limit, abs=0.)
        assert data_single.shape == data_multi.shape
class TestTextReading:
    """
    Tests for writing images, parallactic angles, and non-static attributes
    to plain text files, including the error paths for invalid input.
    """

    def setup_class(self) -> None:
        # Create a single random image, a configuration file, and a pipeline
        # that uses the test directory as working/input/output folder.
        self.test_dir = os.path.dirname(__file__) + "/"

        create_random(self.test_dir, ndit=1)
        create_config(self.test_dir+"PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        # Remove the database/config and the text files written by the tests.
        remove_test_data(self.test_dir, files=["image.dat", "parang.dat",
                                               "attribute.dat", "data.dat"])

    def test_input_data(self) -> None:
        # Sanity check on the randomly generated input images.
        data = self.pipeline.get_data("images")
        assert np.allclose(data[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.9545313398209947e-06, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_text_writing(self) -> None:
        # Write the image to a text file and verify the values on disk.
        text_write = TextWritingModule(file_name="image.dat",
                                       name_in="text_write",
                                       output_dir=None,
                                       data_tag="images",
                                       header=None)

        self.pipeline.add_module(text_write)
        self.pipeline.run_module("text_write")

        data = np.loadtxt(self.test_dir+"image.dat")

        assert np.allclose(data[75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.9545313398209947e-06, rtol=limit, atol=0.)
        assert data.shape == (100, 100)

    def test_text_writing_string(self) -> None:
        # A non-string file_name raises a ValueError at construction time.
        with pytest.raises(ValueError) as error:
            TextWritingModule(file_name=0.,
                              name_in="text_write",
                              output_dir=None,
                              data_tag="images",
                              header=None)

        assert str(error.value) == "Output 'file_name' needs to be a string."

    def test_text_writing_ndim(self) -> None:
        # Only 1D or 2D data can be written, so a 4D data set must fail.
        data_4d = np.random.normal(loc=0, scale=2e-4, size=(5, 5, 5, 5))

        h5f = h5py.File(self.test_dir+"PynPoint_database.hdf5", "a")
        h5f.create_dataset("data_4d", data=data_4d)
        h5f.close()

        text_write = TextWritingModule(file_name="data.dat",
                                       name_in="write_4d",
                                       output_dir=None,
                                       data_tag="data_4d",
                                       header=None)

        self.pipeline.add_module(text_write)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module("write_4d")

        assert str(error.value) == "Only 1D or 2D arrays can be written to a text file."

    def test_text_writing_int(self) -> None:
        # Integer data should round-trip through the text file unchanged.
        data_int = np.arange(1, 101, 1)

        h5f = h5py.File(self.test_dir+"PynPoint_database.hdf5", "a")
        h5f.create_dataset("data_int", data=data_int)
        h5f.close()

        text_write = TextWritingModule(file_name="data.dat",
                                       name_in="write_int",
                                       output_dir=None,
                                       data_tag="data_int",
                                       header=None)

        self.pipeline.add_module(text_write)
        self.pipeline.run_module("write_int")

        data = np.loadtxt(self.test_dir+"data.dat")

        assert np.allclose(data, data_int, rtol=limit, atol=0.)
        assert data.shape == (100, )

    def test_parang_writing(self) -> None:
        # Write the PARANG attribute of 'images' to a text file.
        parang_write = ParangWritingModule(file_name="parang.dat",
                                           name_in="parang_write1",
                                           output_dir=None,
                                           data_tag="images",
                                           header=None)

        self.pipeline.add_module(parang_write)
        self.pipeline.run_module("parang_write1")

        data = np.loadtxt(self.test_dir+"parang.dat")

        assert np.allclose(data[0], 1.0, rtol=limit, atol=0.)
        assert np.allclose(data[9], 10.0, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 5.5, rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_parang_writing_string(self) -> None:
        # A non-string file_name raises a ValueError at construction time.
        with pytest.raises(ValueError) as error:
            ParangWritingModule(file_name=0.,
                                name_in="parang_write2",
                                output_dir=None,
                                data_tag="images",
                                header=None)

        assert str(error.value) == "Output 'file_name' needs to be a string."

    def test_attribute_writing(self) -> None:
        # Write an arbitrary non-static attribute (here PARANG) to a file.
        attr_write = AttributeWritingModule(file_name="attribute.dat",
                                            name_in="attr_write1",
                                            output_dir=None,
                                            data_tag="images",
                                            attribute="PARANG",
                                            header=None)

        self.pipeline.add_module(attr_write)
        self.pipeline.run_module("attr_write1")

        data = np.loadtxt(self.test_dir+"attribute.dat")

        assert np.allclose(data[0], 1.0, rtol=limit, atol=0.)
        assert np.allclose(data[9], 10.0, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 5.5, rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_attribute_writing_string(self) -> None:
        # A non-string file_name raises a ValueError at construction time.
        with pytest.raises(ValueError) as error:
            AttributeWritingModule(file_name=0.,
                                   name_in="attr_write2",
                                   output_dir=None,
                                   data_tag="images",
                                   attribute="PARANG",
                                   header=None)

        assert str(error.value) == "Output 'file_name' needs to be a string."

    def test_attribute_not_present(self) -> None:
        # Requesting an attribute that does not exist raises a ValueError.
        attr_write = AttributeWritingModule(file_name="attribute.dat",
                                            name_in="attr_write3",
                                            output_dir=None,
                                            data_tag="images",
                                            attribute="test",
                                            header=None)

        self.pipeline.add_module(attr_write)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module("attr_write3")

        assert str(error.value) == "The 'test' attribute is not present in 'images'."

    def test_parang_writing_not_present(self) -> None:
        # Delete the PARANG attribute so that writing it should fail.
        h5f = h5py.File(self.test_dir+"PynPoint_database.hdf5", "a")
        del h5f["header_images/PARANG"]
        h5f.close()

        parang_write = ParangWritingModule(file_name="parang.dat",
                                           name_in="parang_write3",
                                           output_dir=None,
                                           data_tag="images",
                                           header=None)

        self.pipeline.add_module(parang_write)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module("parang_write3")

        assert str(error.value) == "The PARANG attribute is not present in 'images'."
class TestFrameSelection:
    """
    Tests for the frame-selection, image-statistics, and frame-similarity
    modules. The tests run in order and reuse the database tags created by
    the earlier ones.
    """

    def setup_class(self) -> None:
        # Create 4 cubes of 11 frames each and a pipeline in the test folder.
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'images', ndit=10, nframes=11)
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['images'])

    def test_read_data(self) -> None:
        # Read the FITS files into the 'read' tag (4 x 11 = 44 frames).
        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir + 'images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0001002167910262529, rtol=limit, atol=0.)
        assert data.shape == (44, 100, 100)

    def test_remove_last_frame(self) -> None:
        # Drop the last frame of each of the 4 cubes: 44 -> 40 frames.
        module = RemoveLastFrameModule(name_in='last',
                                       image_in_tag='read',
                                       image_out_tag='last')

        self.pipeline.add_module(module)
        self.pipeline.run_module('last')

        data = self.pipeline.get_data('last')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010020258903646778, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        # Attach PARANG and STAR_POSITION attributes for the follow-up tests.
        self.pipeline.set_attribute('last', 'PARANG', np.arange(0., 40., 1.), static=False)

        star = np.zeros((40, 2))
        star[:, 0] = np.arange(40., 80., 1.)
        star[:, 1] = np.arange(40., 80., 1.)

        self.pipeline.set_attribute('last', 'STAR_POSITION', star, static=False)

        attribute = self.pipeline.get_attribute('last', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 19.5, rtol=limit, atol=0.)
        assert attribute.shape == (40, )

        attribute = self.pipeline.get_attribute('last', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 59.5, rtol=limit, atol=0.)
        assert attribute.shape == (40, 2)

    def test_remove_start_frame(self) -> None:
        # Drop the first 2 frames of each cube: 40 -> 32 frames. The
        # attributes are cropped accordingly.
        module = RemoveStartFramesModule(frames=2,
                                         name_in='start',
                                         image_in_tag='last',
                                         image_out_tag='start')

        self.pipeline.add_module(module)
        self.pipeline.run_module('start')

        data = self.pipeline.get_data('start')
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010011298467340513, rtol=limit, atol=0.)
        assert data.shape == (32, 100, 100)

        attribute = self.pipeline.get_attribute('start', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 20.5, rtol=limit, atol=0.)
        assert attribute.shape == (32, )

        attribute = self.pipeline.get_attribute('start', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 60.5, rtol=limit, atol=0.)
        assert attribute.shape == (32, 2)

    def test_remove_frames(self) -> None:
        # Remove 5 frames by explicit index; check both output tags and
        # the cropped attributes.
        module = RemoveFramesModule(name_in='remove',
                                    image_in_tag='start',
                                    selected_out_tag='selected',
                                    removed_out_tag='removed',
                                    frames=[5, 8, 13, 25, 31])

        self.pipeline.add_module(module)
        self.pipeline.run_module('remove')

        data = self.pipeline.get_data('selected')
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.984682304434105e-05, rtol=limit, atol=0.)
        assert data.shape == (27, 100, 100)

        data = self.pipeline.get_data('removed')
        assert np.allclose(data[0, 50, 50], 0.09818692015286978, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010155025747035087, rtol=limit, atol=0.)
        assert data.shape == (5, 100, 100)

        attribute = self.pipeline.get_attribute('selected', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 20.296296296296298, rtol=limit, atol=0.)
        assert attribute.shape == (27, )

        attribute = self.pipeline.get_attribute('selected', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 60.2962962962963, rtol=limit, atol=0.)
        assert attribute.shape == (27, 2)

        attribute = self.pipeline.get_attribute('removed', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 21.6, rtol=limit, atol=0.)
        assert attribute.shape == (5, )

        attribute = self.pipeline.get_attribute('removed', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 61.6, rtol=limit, atol=0.)
        assert attribute.shape == (5, 2)

    def test_frame_selection(self) -> None:
        # First selection: 'median' method with a circular aperture.
        module = FrameSelectionModule(name_in='select1',
                                      image_in_tag='start',
                                      selected_out_tag='selected1',
                                      removed_out_tag='removed1',
                                      index_out_tag='index1',
                                      method='median',
                                      threshold=1.,
                                      fwhm=0.1,
                                      aperture=('circular', 0.2),
                                      position=(None, None, 0.5))

        self.pipeline.add_module(module)
        self.pipeline.run_module('select1')

        data = self.pipeline.get_data('selected1')
        assert np.allclose(data[0, 50, 50], 0.09791350617182591, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.980792188317311e-05, rtol=limit, atol=0.)
        assert data.shape == (22, 100, 100)

        data = self.pipeline.get_data('removed1')
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010078412281191547, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

        data = self.pipeline.get_data('index1')
        assert data[-1] == 28
        assert np.sum(data) == 115
        assert data.shape == (10, )

        attribute = self.pipeline.get_attribute('selected1', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 22.681818181818183, rtol=limit, atol=0.)
        assert attribute.shape == (22, )

        attribute = self.pipeline.get_attribute('selected1', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (22, 2)

        attribute = self.pipeline.get_attribute('removed1', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 15.7, rtol=limit, atol=0.)
        assert attribute.shape == (10, )

        attribute = self.pipeline.get_attribute('removed1', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (10, 2)

        # Second selection: 'max' method with an annulus aperture.
        module = FrameSelectionModule(name_in='select2',
                                      image_in_tag='start',
                                      selected_out_tag='selected2',
                                      removed_out_tag='removed2',
                                      index_out_tag='index2',
                                      method='max',
                                      threshold=3.,
                                      fwhm=0.1,
                                      aperture=('annulus', 0.1, 0.2),
                                      position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('select2')

        data = self.pipeline.get_data('selected2')
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010037996502199598, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('removed2')
        assert np.allclose(data[0, 50, 50], 0.097912284606689, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.966801742575358e-05, rtol=limit, atol=0.)
        assert data.shape == (12, 100, 100)

        data = self.pipeline.get_data('index2')
        assert data[-1] == 30
        assert np.sum(data) == 230
        assert data.shape == (12, )

        attribute = self.pipeline.get_attribute('selected2', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 17.8, rtol=limit, atol=0.)
        assert attribute.shape == (20, )

        attribute = self.pipeline.get_attribute('selected2', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (20, 2)

        attribute = self.pipeline.get_attribute('removed2', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 25.0, rtol=limit, atol=0.)
        assert attribute.shape == (12, )

        attribute = self.pipeline.get_attribute('removed2', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (12, 2)

    def test_image_statistics_full(self) -> None:
        # Statistics over the full frames.
        module = ImageStatisticsModule(name_in='stat1',
                                       image_in_tag='read',
                                       stat_out_tag='stat1',
                                       position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat1')

        data = self.pipeline.get_data('stat1')
        assert np.allclose(data[0, 0], -0.0007312880198509591, rtol=limit, atol=0.)
        assert np.allclose(np.sum(data), 48.479917666979716, rtol=limit, atol=0.)
        assert data.shape == (44, 6)

    def test_image_statistics_position(self) -> None:
        # Statistics within an aperture at a given position.
        # NOTE: renamed from 'test_image_statistics_posiiton' (typo fix);
        # pytest discovers tests by the 'test_' prefix, so behavior is equal.
        module = ImageStatisticsModule(name_in='stat2',
                                       image_in_tag='read',
                                       stat_out_tag='stat2',
                                       position=(70, 20, 0.5))

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat2')

        data = self.pipeline.get_data('stat2')
        assert np.allclose(data[0, 0], -0.0006306714900382097, rtol=limit, atol=0.)
        assert np.allclose(np.sum(data), -0.05448258074038106, rtol=limit, atol=0.)
        assert data.shape == (44, 6)

    def test_frame_similarity_mse(self) -> None:
        # Similarity of each frame to the rest with the MSE metric, stored
        # as a non-static attribute of the 'read' tag.
        module = FrameSimilarityModule(name_in='simi1',
                                       image_tag='read',
                                       method='MSE',
                                       mask_radius=(0., 1.))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi1')

        similarity = self.pipeline.get_attribute('read', 'MSE', static=False)

        assert len(similarity) == self.pipeline.get_shape('read')[0]
        assert np.min(similarity) > 0
        assert similarity[4] != similarity[8]
        assert np.allclose(np.sum(similarity), 1.7938335695664495e-06, rtol=limit, atol=0.)
        assert np.allclose(similarity[0], 4.103008589430469e-08, rtol=limit, atol=0.)

    def test_frame_similarity_pcc(self) -> None:
        # Pearson correlation coefficient metric; values lie in (0, 1).
        module = FrameSimilarityModule(name_in='simi2',
                                       image_tag='read',
                                       method='PCC',
                                       mask_radius=(0., 1.))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi2')

        similarity = self.pipeline.get_attribute('read', 'PCC', static=False)

        assert len(similarity) == self.pipeline.get_shape('read')[0]
        assert np.min(similarity) > 0
        assert np.max(similarity) < 1
        assert similarity[4] != similarity[8]
        assert np.allclose(np.sum(similarity), 43.974652830856314, rtol=limit, atol=0.)
        assert np.allclose(similarity[0], 0.9994193494590345, rtol=limit, atol=0.)

    def test_frame_similarity_ssim(self) -> None:
        # Structural similarity metric; values lie in (0, 1).
        module = FrameSimilarityModule(name_in='simi3',
                                       image_tag='read',
                                       method='SSIM',
                                       mask_radius=(0., 1.),
                                       temporal_median='constant')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi3')

        similarity = self.pipeline.get_attribute('read', 'SSIM', static=False)

        assert len(similarity) == self.pipeline.get_shape('read')[0]
        assert np.min(similarity) > 0
        assert np.max(similarity) < 1
        assert similarity[4] != similarity[8]
        assert np.allclose(np.sum(similarity), 43.999059977871184, rtol=limit, atol=0.)
        assert np.allclose(similarity[0], 0.9999793908738922, rtol=limit, atol=0.)

    def test_select_by_attribute(self) -> None:
        # Keep the 6 frames with the highest SSIM (descending order).
        total_length = self.pipeline.get_shape('read')[0]
        self.pipeline.set_attribute('read', 'INDEX', range(total_length), static=False)
        attribute_tag = 'SSIM'

        module = SelectByAttributeModule(name_in='frame_removal_1',
                                         image_in_tag='read',
                                         attribute_tag=attribute_tag,
                                         number_frames=6,
                                         order='descending',
                                         selected_out_tag='select_sim',
                                         removed_out_tag='remove_sim')

        self.pipeline.add_module(module)
        self.pipeline.run_module('frame_removal_1')

        index = self.pipeline.get_attribute('select_sim', 'INDEX', static=False)
        similarity = self.pipeline.get_attribute('select_sim', attribute_tag, static=False)
        sim_removed = self.pipeline.get_attribute('remove_sim', attribute_tag, static=False)

        # check attribute length
        assert self.pipeline.get_shape('select_sim')[0] == 6
        assert len(similarity) == 6
        assert len(similarity) == len(index)
        assert len(similarity) + len(sim_removed) == total_length

        # check sorted
        assert all(similarity[i] >= similarity[i + 1] for i in range(len(similarity) - 1))

        # check that the selected attributes are in the correct tags
        assert np.min(similarity) > np.max(sim_removed)
class TestAttributeReading:
    """
    Tests for reading parallactic angles, generic attributes, and
    wavelengths from text files into the HDF5 database.
    """

    def setup_class(self) -> None:
        # Create random images (without PARANG) and the text files that the
        # reading modules will consume.
        self.test_dir = os.path.dirname(__file__) + '/'

        create_random(self.test_dir, ndit=10, parang=None)
        create_config(self.test_dir + 'PynPoint_config.ini')

        np.savetxt(self.test_dir + 'parang.dat', np.arange(1., 11., 1.))
        np.savetxt(self.test_dir + 'new.dat', np.arange(10., 21., 1.))
        np.savetxt(self.test_dir + 'attribute.dat', np.arange(1, 11, 1), fmt='%i')
        np.savetxt(self.test_dir + 'wavelength.dat', np.arange(1, 11, 1))

        # 2D data set used to trigger the "must be 1D" error paths.
        data2d = np.random.normal(loc=0, scale=2e-4, size=(10, 10))
        np.savetxt(self.test_dir + 'data_2d.dat', data2d)

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, files=[
            'parang.dat', 'new.dat', 'attribute.dat', 'data_2d.dat', 'wavelength.dat'
        ])

    def test_input_data(self) -> None:
        # Sanity check on the randomly generated input images.
        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 75, 25], 6.921353838812206e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

    def test_parang_reading(self) -> None:
        # Read parang.dat into the PARANG attribute of 'images'.
        module = ParangReadingModule(file_name='parang.dat',
                                     name_in='parang1',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('parang1')

        data = self.pipeline.get_data('header_images/PARANG')
        assert data.dtype == 'float64'
        assert np.allclose(data, np.arange(1., 11., 1.), rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_parang_reading_same(self) -> None:
        # Overwriting with identical values only emits a warning.
        module = ParangReadingModule(file_name='parang.dat',
                                     name_in='parang2',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('parang2')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The PARANG attribute is already present and ' \
                                             'contains the same values as are present in ' \
                                             'parang.dat.'

    def test_parang_reading_present(self) -> None:
        # Without overwrite=True, an existing PARANG attribute is kept and
        # a warning is raised.
        module = ParangReadingModule(file_name='new.dat',
                                     name_in='parang3',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=False)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('parang3')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The PARANG attribute is already present. Set the ' \
                                             '\'overwrite\' parameter to True in order to ' \
                                             'overwrite the values with new.dat.'

    def test_parang_reading_overwrite(self) -> None:
        # With overwrite=True, the new values replace the old ones silently.
        module = ParangReadingModule(file_name='new.dat',
                                     name_in='parang4',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('parang4')

    def test_parang_reading_2d(self) -> None:
        # A 2D input file is rejected with a ValueError.
        module = ParangReadingModule(file_name='data_2d.dat',
                                     name_in='parang6',
                                     input_dir=None,
                                     data_tag='images',
                                     overwrite=False)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('parang6')

        assert str(error.value) == 'The input file data_2d.dat should contain a 1D data set with ' \
                                   'the parallactic angles.'

    def test_attribute_reading(self) -> None:
        # Read an integer attribute (EXP_NO) from a text file.
        module = AttributeReadingModule(file_name='attribute.dat',
                                        attribute='EXP_NO',
                                        name_in='attribute1',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('attribute1')

        data = self.pipeline.get_data('header_images/EXP_NO')
        assert data.dtype == 'int64'
        assert np.allclose(data, np.arange(1, 11, 1), rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_attribute_reading_present(self) -> None:
        # Without overwrite=True, an existing attribute is kept and a
        # warning is raised.
        module = AttributeReadingModule(file_name='parang.dat',
                                        attribute='PARANG',
                                        name_in='attribute3',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=False)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('attribute3')

        assert warning[0].message.args[0] == 'The attribute \'PARANG\' is already present. Set ' \
                                             'the \'overwrite\' parameter to True in order to ' \
                                             'overwrite the values with parang.dat.'

    def test_attribute_reading_invalid(self) -> None:
        # An unknown attribute name raises a ValueError.
        module = AttributeReadingModule(file_name='attribute.dat',
                                        attribute='test',
                                        name_in='attribute4',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=False)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('attribute4')

        assert str(error.value) == '\'test\' is not a valid attribute.'

    def test_attribute_reading_2d(self) -> None:
        # A 2D input file is rejected with a ValueError.
        module = AttributeReadingModule(file_name='data_2d.dat',
                                        attribute='DITHER_X',
                                        name_in='attribute5',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=False)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('attribute5')

        assert str(error.value) == 'The input file data_2d.dat should contain a 1D list with ' \
                                   'attributes.'

    def test_attribute_reading_same(self) -> None:
        # Overwriting with identical values only emits a warning.
        module = AttributeReadingModule(file_name='attribute.dat',
                                        attribute='EXP_NO',
                                        name_in='attribute6',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('attribute6')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The \'EXP_NO\' attribute is already present and ' \
                                             'contains the same values as are present in ' \
                                             'attribute.dat.'

    def test_attribute_reading_overwrite(self) -> None:
        # With overwrite=True, the new values replace the old ones silently.
        module = AttributeReadingModule(file_name='parang.dat',
                                        attribute='PARANG',
                                        name_in='attribute7',
                                        input_dir=None,
                                        data_tag='images',
                                        overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('attribute7')

        attribute = self.pipeline.get_attribute('images', 'PARANG', static=False)
        assert np.allclose(attribute, np.arange(1., 11., 1.), rtol=limit, atol=0.)

    def test_wavelength_reading(self) -> None:
        # Read wavelength.dat into the WAVELENGTH attribute of 'images'.
        module = WavelengthReadingModule(file_name='wavelength.dat',
                                         name_in='wavelength1',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelength1')

        data = self.pipeline.get_data('header_images/WAVELENGTH')
        assert data.dtype == 'float64'
        assert np.allclose(data, np.arange(1., 11., 1.), rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_wavelength_reading_same(self) -> None:
        # Overwriting with identical values only emits a warning.
        module = WavelengthReadingModule(file_name='wavelength.dat',
                                         name_in='wavelength2',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('wavelength2')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The WAVELENGTH attribute is already present and ' \
                                             'contains the same values as are present in ' \
                                             'wavelength.dat.'

    def test_wavelength_reading_present(self) -> None:
        # Without overwrite=True, an existing WAVELENGTH attribute is kept
        # and a warning is raised.
        module = WavelengthReadingModule(file_name='new.dat',
                                         name_in='wavelength3',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=False)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('wavelength3')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The WAVELENGTH attribute is already present. Set ' \
                                             'the \'overwrite\' parameter to True in order to ' \
                                             'overwrite the values with new.dat.'

    def test_wavelength_reading_overwrite(self) -> None:
        # With overwrite=True, the new values replace the old ones silently.
        module = WavelengthReadingModule(file_name='new.dat',
                                         name_in='wavelength4',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelength4')

    def test_wavelength_reading_2d(self) -> None:
        # A 2D input file is rejected with a ValueError.
        module = WavelengthReadingModule(file_name='data_2d.dat',
                                         name_in='wavelength6',
                                         input_dir=None,
                                         data_tag='images',
                                         overwrite=False)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('wavelength6')

        assert str(error.value) == 'The input file data_2d.dat should contain a 1D data set with ' \
                                   'the wavelengths.'
class TestTimeDenoising:
    """
    Tests for wavelet-based denoising along the time axis (CWT and DWT)
    and for the time normalization module.
    """

    def setup_class(self) -> None:
        # Relative tolerance used with pytest.approx throughout this class.
        self.limit = 1e-10

        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['images'])

    def test_read_data(self) -> None:
        # Read the FITS files into the 'images' tag and check the content.
        module = FitsReadingModule(name_in='read',
                                   image_tag='images',
                                   input_dir=self.test_dir + 'images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('images')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_wavelet_denoising_cwt_dog(self) -> None:
        # CWT denoising with the derivative-of-Gaussian ('dog') wavelet.
        cwt_config = CwtWaveletConfiguration(wavelet='dog',
                                             wavelet_order=2,
                                             keep_mean=False,
                                             resolution=0.5)

        assert cwt_config.m_wavelet == 'dog'
        assert cwt_config.m_wavelet_order == 2
        assert not cwt_config.m_keep_mean
        assert cwt_config.m_resolution == 0.5

        module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config,
                                            name_in='wavelet_cwt_dog',
                                            image_in_tag='images',
                                            image_out_tag='wavelet_cwt_dog',
                                            padding='zero',
                                            median_filter=True,
                                            threshold_function='soft')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_cwt_dog')

        data = self.pipeline.get_data('wavelet_cwt_dog')
        assert np.sum(data) == pytest.approx(105.1035789572968, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        # Rerun with 4 CPUs and check the multiprocessing result is identical.
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        self.pipeline.run_module('wavelet_cwt_dog')

        data_multi = self.pipeline.get_data('wavelet_cwt_dog')
        assert data == pytest.approx(data_multi, rel=self.limit, abs=0.)
        assert data.shape == data_multi.shape

    def test_wavelet_denoising_cwt_morlet(self) -> None:
        # CWT denoising with the 'morlet' wavelet, single CPU.
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        cwt_config = CwtWaveletConfiguration(wavelet='morlet',
                                             wavelet_order=5,
                                             keep_mean=False,
                                             resolution=0.5)

        assert cwt_config.m_wavelet == 'morlet'
        assert cwt_config.m_wavelet_order == 5
        assert not cwt_config.m_keep_mean
        assert cwt_config.m_resolution == 0.5

        module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config,
                                            name_in='wavelet_cwt_morlet',
                                            image_in_tag='images',
                                            image_out_tag='wavelet_cwt_morlet',
                                            padding='mirror',
                                            median_filter=False,
                                            threshold_function='hard')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_cwt_morlet')

        data = self.pipeline.get_data('wavelet_cwt_morlet')
        assert np.sum(data) == pytest.approx(104.86262840716438, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        data = self.pipeline.get_attribute('wavelet_cwt_morlet', 'NFRAMES', static=False)
        assert data[0] == data[1] == 5

    def test_wavelet_denoising_dwt(self) -> None:
        # DWT denoising with a Daubechies ('db8') wavelet.
        dwt_config = DwtWaveletConfiguration(wavelet='db8')

        assert dwt_config.m_wavelet == 'db8'

        module = WaveletTimeDenoisingModule(wavelet_configuration=dwt_config,
                                            name_in='wavelet_dwt',
                                            image_in_tag='images',
                                            image_out_tag='wavelet_dwt',
                                            padding='zero',
                                            median_filter=True,
                                            threshold_function='soft')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_dwt')

        data = self.pipeline.get_data('wavelet_dwt')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_time_normalization(self) -> None:
        module = TimeNormalizationModule(name_in='timenorm',
                                         image_in_tag='images',
                                         image_out_tag='timenorm')

        self.pipeline.add_module(module)
        self.pipeline.run_module('timenorm')

        data = self.pipeline.get_data('timenorm')
        assert np.sum(data) == pytest.approx(56.443663773873, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_wavelet_denoising_even_size(self) -> None:
        # Pad the images to an even size (12 x 12) and run the CWT denoising
        # with both 'zero' and 'mirror' padding.
        module = AddLinesModule(name_in='add',
                                image_in_tag='images',
                                image_out_tag='images_even',
                                lines=(1, 0, 1, 0))

        self.pipeline.add_module(module)
        self.pipeline.run_module('add')

        data = self.pipeline.get_data('images_even')
        assert np.sum(data) == pytest.approx(105.54278879805275, rel=self.limit, abs=0.)
        assert data.shape == (10, 12, 12)

        cwt_config = CwtWaveletConfiguration(wavelet='dog',
                                             wavelet_order=2,
                                             keep_mean=False,
                                             resolution=0.5)

        assert cwt_config.m_wavelet == 'dog'
        assert cwt_config.m_wavelet_order == 2
        assert not cwt_config.m_keep_mean
        assert cwt_config.m_resolution == 0.5

        module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config,
                                            name_in='wavelet_even_1',
                                            image_in_tag='images_even',
                                            image_out_tag='wavelet_even_1',
                                            padding='zero',
                                            median_filter=True,
                                            threshold_function='soft')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_even_1')

        data = self.pipeline.get_data('wavelet_even_1')
        assert np.sum(data) == pytest.approx(105.1035789572968, rel=self.limit, abs=0.)
        assert data.shape == (10, 12, 12)

        module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config,
                                            name_in='wavelet_even_2',
                                            image_in_tag='images_even',
                                            image_out_tag='wavelet_even_2',
                                            padding='mirror',
                                            median_filter=True,
                                            threshold_function='soft')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_even_2')

        data = self.pipeline.get_data('wavelet_even_2')
        assert np.sum(data) == pytest.approx(105.06809820408587, rel=self.limit, abs=0.)
        assert data.shape == (10, 12, 12)

        # NFRAMES should be preserved by the denoising modules.
        data = self.pipeline.get_attribute('images', 'NFRAMES', static=False)
        assert data == pytest.approx([5, 5], rel=self.limit, abs=0.)

        data = self.pipeline.get_attribute('wavelet_even_1', 'NFRAMES', static=False)
        assert data == pytest.approx([5, 5], rel=self.limit, abs=0.)

        data = self.pipeline.get_attribute('wavelet_even_2', 'NFRAMES', static=False)
        assert data == pytest.approx([5, 5], rel=self.limit, abs=0.)
class TestBackground:
    """Tests for the background subtraction modules: simple/mean dithering
    subtraction, PCA-based dithering subtraction, nodding (sky) subtraction,
    and line (row/column) subtraction."""

    def setup_class(self):
        """Create four synthetic data sets (dither, star, sky, line) and a pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'

        # Star dithered between the four quadrants of the detector
        create_fake(path=self.test_dir + 'dither',
                    ndit=[20, 20, 20, 20],
                    nframes=[20, 20, 20, 20],
                    exp_no=[1, 2, 3, 4],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[25, 75, 75, 25],
                    y0=[75, 75, 25, 25],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        # Star centered in every frame (odd exposure numbers)
        create_fake(path=self.test_dir + 'star',
                    ndit=[10, 10, 10, 10],
                    nframes=[10, 10, 10, 10],
                    exp_no=[1, 3, 5, 7],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        # Sky frames without a star (fwhm=None), interleaved exposure numbers
        create_fake(path=self.test_dir + 'sky',
                    ndit=[5, 5, 5, 5],
                    nframes=[5, 5, 5, 5],
                    exp_no=[2, 4, 6, 8],
                    npix=(100, 100),
                    fwhm=None,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        # Small data set for the line subtraction tests
        create_fake(path=self.test_dir + 'line',
                    ndit=[4, 4, 4, 4],
                    nframes=[4, 4, 4, 4],
                    exp_no=[1, 3, 5, 7],
                    npix=(20, 20),
                    fwhm=3.,
                    x0=[10, 10, 10, 10],
                    y0=[10, 10, 10, 10],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the data created by setup_class."""
        remove_test_data(self.test_dir, folders=['dither', 'star', 'sky', 'line'])

    def test_read_data(self):
        """Read the four data sets into the database and verify their contents."""

        read = FitsReadingModule(name_in='read1',
                                 image_tag='dither',
                                 input_dir=self.test_dir + 'dither')

        self.pipeline.add_module(read)

        read = FitsReadingModule(name_in='read2',
                                 image_tag='star',
                                 input_dir=self.test_dir + 'star')

        self.pipeline.add_module(read)

        read = FitsReadingModule(name_in='read3',
                                 image_tag='sky',
                                 input_dir=self.test_dir + 'sky')

        self.pipeline.add_module(read)

        read = FitsReadingModule(name_in='read4',
                                 image_tag='line',
                                 input_dir=self.test_dir + 'line')

        self.pipeline.add_module(read)

        self.pipeline.run_module('read1')
        self.pipeline.run_module('read2')
        self.pipeline.run_module('read3')
        self.pipeline.run_module('read4')

        data = self.pipeline.get_data('dither')
        assert np.allclose(data[0, 74, 24], 0.05304008435511765, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010033896953157959, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('star')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        data = self.pipeline.get_data('sky')
        assert np.allclose(data[0, 50, 50], -7.613171257478652e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 8.937360237872607e-07, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('line')
        assert np.allclose(data[0, 10, 10], 0.09799496683489618, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.002502384977510189, rtol=limit, atol=0.)
        assert data.shape == (16, 20, 20)

    def test_simple_background(self):
        """Subtract the background with a fixed frame shift."""

        simple = SimpleBackgroundSubtractionModule(shift=20,
                                                   name_in='simple',
                                                   image_in_tag='dither',
                                                   image_out_tag='simple')

        self.pipeline.add_module(simple)
        self.pipeline.run_module('simple')

        data = self.pipeline.get_data('simple')
        assert np.allclose(data[0, 74, 74], -0.05288064325101517, rtol=limit, atol=0.)
        # Mean should be ~0 after pairwise subtraction
        assert np.allclose(np.mean(data), 2.7755575615628916e-22, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

    def test_mean_background_shift(self):
        """Mean background subtraction with an explicit shift."""

        mean = MeanBackgroundSubtractionModule(shift=20,
                                               cubes=1,
                                               name_in='mean2',
                                               image_in_tag='dither',
                                               image_out_tag='mean2')

        self.pipeline.add_module(mean)
        self.pipeline.run_module('mean2')

        data = self.pipeline.get_data('mean2')
        assert np.allclose(data[0, 74, 24], 0.0530465391626132, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.3970872216676808e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

    def test_mean_background_nframes(self):
        """Mean background subtraction with the shift derived from NFRAMES.

        Should give the same result as the explicit shift in the previous test."""

        mean = MeanBackgroundSubtractionModule(shift=None,
                                               cubes=1,
                                               name_in='mean1',
                                               image_in_tag='dither',
                                               image_out_tag='mean1')

        self.pipeline.add_module(mean)
        self.pipeline.run_module('mean1')

        data = self.pipeline.get_data('mean1')
        assert np.allclose(data[0, 74, 24], 0.0530465391626132, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.3970872216676808e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

    def test_dithering_attributes(self):
        """PCA dithering subtraction with positions derived from the attributes.

        Checks every intermediate tag that the module creates (crop, star,
        mean, background, PCA fit/residuals/mask) and the combined output."""

        pca_dither = DitheringBackgroundModule(name_in='pca_dither1',
                                               image_in_tag='dither',
                                               image_out_tag='pca_dither1',
                                               center=None,
                                               cubes=None,
                                               size=0.8,
                                               gaussian=0.1,
                                               subframe=0.5,
                                               pca_number=5,
                                               mask_star=0.1,
                                               crop=True,
                                               prepare=True,
                                               pca_background=True,
                                               combine='pca')

        self.pipeline.add_module(pca_dither)
        self.pipeline.run_module('pca_dither1')

        data = self.pipeline.get_data('dither_dither_crop1')
        assert np.allclose(data[0, 14, 14], 0.05304008435511765, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0002606205855710527, rtol=1e-6, atol=0.)
        assert data.shape == (80, 31, 31)

        data = self.pipeline.get_data('dither_dither_star1')
        assert np.allclose(data[0, 14, 14], 0.05304008435511765, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0010414302265833978, rtol=1e-6, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('dither_dither_mean1')
        assert np.allclose(data[0, 14, 14], 0.0530465391626132, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0010426228104479674, rtol=1e-6, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('dither_dither_background1')
        assert np.allclose(data[0, 14, 14], -0.00010629310882411674, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 3.5070523360436835e-07, rtol=1e-6, atol=0.)
        assert data.shape == (60, 31, 31)

        # The PCA fit is numerically less stable, hence the looser tolerances
        data = self.pipeline.get_data('dither_dither_pca_fit1')
        assert np.allclose(data[0, 14, 14], 1.5196412298279846e-05, rtol=1e-5, atol=0.)
        assert np.allclose(np.mean(data), 1.9779802529804516e-07, rtol=1e-4, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('dither_dither_pca_res1')
        assert np.allclose(data[0, 14, 14], 0.05302488794281937, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0010412324285580998, rtol=1e-6, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('dither_dither_pca_mask1')
        assert np.allclose(data[0, 14, 14], 0., rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.9531737773152965, rtol=1e-6, atol=0.)
        assert data.shape == (20, 31, 31)

        data = self.pipeline.get_data('pca_dither1')
        assert np.allclose(data[0, 14, 14], 0.05302488794281937, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.001040627977720779, rtol=1e-6, atol=0.)
        assert data.shape == (80, 31, 31)

        data = self.pipeline.get_attribute('dither_dither_pca_res1', 'STAR_POSITION', static=False)
        assert np.allclose(data[0, 0], [15., 15.], rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 15., rtol=1e-6, atol=0.)
        assert data.shape == (20, 2)

    def test_dithering_center(self):
        """PCA dithering subtraction with user-provided dither positions."""

        pca_dither = DitheringBackgroundModule(name_in='pca_dither2',
                                               image_in_tag='dither',
                                               image_out_tag='pca_dither2',
                                               center=((25., 75.),
                                                       (75., 75.),
                                                       (75., 25.),
                                                       (25., 25.)),
                                               cubes=1,
                                               size=0.8,
                                               gaussian=0.1,
                                               subframe=None,
                                               pca_number=5,
                                               mask_star=0.1,
                                               bad_pixel=None,
                                               crop=True,
                                               prepare=True,
                                               pca_background=True,
                                               combine='pca')

        self.pipeline.add_module(pca_dither)
        self.pipeline.run_module('pca_dither2')

        data = self.pipeline.get_data('pca_dither2')
        assert np.allclose(data[0, 14, 14], 0.05302488794328089, rtol=1e-6, atol=0.)
        assert np.allclose(np.mean(data), 0.0010406279782666378, rtol=1e-3, atol=0.)
        assert data.shape == (80, 31, 31)

    def test_nodding_background(self):
        """Sky subtraction: stack the sky cubes and subtract them from the star frames."""

        mean = StackCubesModule(name_in='mean',
                                image_in_tag='sky',
                                image_out_tag='mean',
                                combine='mean')

        self.pipeline.add_module(mean)
        self.pipeline.run_module('mean')

        data = self.pipeline.get_data('mean')
        assert np.allclose(data[0, 50, 50], 1.270877476321969e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 8.937360237872607e-07, rtol=limit, atol=0.)
        assert data.shape == (4, 100, 100)

        attribute = self.pipeline.get_attribute('mean', 'INDEX', static=False)
        assert np.allclose(np.mean(attribute), 1.5, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        # Each stacked cube counts as a single frame
        attribute = self.pipeline.get_attribute('mean', 'NFRAMES', static=False)
        assert np.allclose(np.mean(attribute), 1, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        nodding = NoddingBackgroundModule(name_in='nodding',
                                          sky_in_tag='mean',
                                          science_in_tag='star',
                                          image_out_tag='nodding',
                                          mode='both')

        self.pipeline.add_module(nodding)
        self.pipeline.run_module('nodding')

        data = self.pipeline.get_data('nodding')
        assert np.allclose(data[0, 50, 50], 0.09797142624717381, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.945087327935862e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_line_background_mean(self):
        """Row/column background subtraction with the mean."""

        module = LineSubtractionModule(name_in='line1',
                                       image_in_tag='line',
                                       image_out_tag='line_mean',
                                       combine='mean',
                                       mask=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('line1')

        data = self.pipeline.get_data('line_mean')
        assert np.allclose(data[0, 10, 10], 0.09792388324443534, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0024245904637616735, rtol=limit, atol=0.)
        assert data.shape == (16, 20, 20)

    def test_line_background_median(self):
        """Row/column background subtraction with the median."""

        module = LineSubtractionModule(name_in='line2',
                                       image_in_tag='line',
                                       image_out_tag='line_median',
                                       combine='median',
                                       mask=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('line2')

        data = self.pipeline.get_data('line_median')
        assert np.allclose(data[0, 10, 10], 0.09782789699611127, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0024723022374338196, rtol=limit, atol=0.)
        assert data.shape == (16, 20, 20)
class TestFitsWritingModule:
    """Tests for FitsWritingModule: plain writing, data ranges, subsets,
    overwrite behavior, and FITS header-card length limits."""

    def setup_class(self):
        """Create a data set of FITS files and a pipeline in the test directory."""
        self.test_dir = os.path.dirname(__file__) + '/'
        create_star_data(path=self.test_dir + 'fits')
        create_config(self.test_dir + 'PynPoint_config.ini')
        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the data created by setup_class and all written FITS files."""
        # test000-test003 are produced by the subset_size tests
        files = [
            'test.fits', 'test000.fits', 'test001.fits', 'test002.fits', 'test003.fits'
        ]

        remove_test_data(self.test_dir, folders=['fits'], files=files)

    def test_fits_reading(self):
        """Read the input images that the writing tests operate on."""

        module = FitsReadingModule(name_in='read',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_fits_writing(self):
        """Write the full data set to a single FITS file."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write1',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write1')

    def test_filename_extension(self):
        """A file name without the .fits extension should raise a ValueError."""

        with pytest.raises(ValueError) as error:
            FitsWritingModule(file_name='test.dat',
                              name_in='write3',
                              output_dir=None,
                              data_tag='images',
                              data_range=None,
                              overwrite=True,
                              subset_size=None)

        assert str(error.value) == 'Output \'file_name\' requires the FITS extension.'

    def test_data_range(self):
        """Write only a range of the images."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write4',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=(0, 10),
                                   overwrite=True,
                                   subset_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write4')

    def test_not_overwritten(self):
        """With overwrite=False an existing file should trigger a warning, not a write."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write5',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=False,
                                   subset_size=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('write5')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Filename already present. Use overwrite=True ' \
                                             'to overwrite an existing FITS file.'

    def test_subset_size(self):
        """Split the output into subsets of 10 images each."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write6',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=True,
                                   subset_size=10)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write6')

    def test_subset_size_data_range(self):
        """Combine a data range with a subset size."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write7',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=(8, 18),
                                   overwrite=True,
                                   subset_size=10)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write7')

    def test_attribute_length(self):
        """Attribute values that exceed the FITS card length are truncated with a warning."""

        text = 'long_text_long_text_long_text_long_text_long_text_long_text_long_text_long_text'

        self.pipeline.set_attribute('images', 'short', 'value', static=True)
        self.pipeline.set_attribute('images', 'longer_than_eight1', 'value', static=True)
        self.pipeline.set_attribute('images', 'longer_than_eight2', text, static=True)

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write8',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=True,
                                   subset_size=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('write8')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Key \'hierarch longer_than_eight2\' with value ' \
                                             '\'long_text_long_text_long_text_long_text_long_' \
                                             'text_long_text_long_text_long_text\' is too ' \
                                             'long for the FITS format. To avoid an error, ' \
                                             'the value was truncated to \'long_text_long_text' \
                                             '_long_text_long_text_long_tex\'.'
class TestFrameSelection:
    """Tests for the frame selection modules: removing frames by position or
    index, quality-based selection, image statistics, frame similarity,
    attribute-based selection, and residual-based selection."""

    def setup_class(self) -> None:
        """Create a synthetic star data set and a pipeline in the test directory."""
        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the data created by setup_class."""
        remove_test_data(self.test_dir, folders=['images'])

    def test_read_data(self) -> None:
        """Read the images and prepare the NDIT attribute for the later tests."""

        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir + 'images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        attr = self.pipeline.get_attribute('read', 'NDIT', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('read', 'NFRAMES', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        # NDIT < NFRAMES so that RemoveLastFrameModule has something to remove
        self.pipeline.set_attribute('read', 'NDIT', [4, 4], static=False)

    def test_remove_last_frame(self) -> None:
        """Remove the last (mean) frame of each cube."""

        module = RemoveLastFrameModule(name_in='last',
                                       image_in_tag='read',
                                       image_out_tag='last')

        self.pipeline.add_module(module)
        self.pipeline.run_module('last')

        data = self.pipeline.get_data('last')
        assert np.sum(data) == pytest.approx(84.68885503527224, rel=self.limit, abs=0.)
        assert data.shape == (8, 11, 11)

        # Attach attributes that the subsequent selection tests rely on
        self.pipeline.set_attribute('last', 'PARANG', np.arange(8.), static=False)
        self.pipeline.set_attribute('last', 'STAR_POSITION', np.full((8, 2), 5.), static=False)

        attr = self.pipeline.get_attribute('last', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(28., rel=self.limit, abs=0.)
        assert attr.shape == (8, )

        attr = self.pipeline.get_attribute('last', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(80., rel=self.limit, abs=0.)
        assert attr.shape == (8, 2)

    def test_remove_start_frame(self) -> None:
        """Remove the first frame of each cube."""

        module = RemoveStartFramesModule(frames=1,
                                         name_in='start',
                                         image_in_tag='last',
                                         image_out_tag='start')

        self.pipeline.add_module(module)
        self.pipeline.run_module('start')

        data = self.pipeline.get_data('start')
        assert np.sum(data) == pytest.approx(64.44307047549808, rel=self.limit, abs=0.)
        assert data.shape == (6, 11, 11)

        attr = self.pipeline.get_attribute('start', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(24., rel=self.limit, abs=0.)
        assert attr.shape == (6, )

        attr = self.pipeline.get_attribute('start', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(60., rel=self.limit, abs=0.)
        assert attr.shape == (6, 2)

    def test_remove_frames(self) -> None:
        """Remove frames with explicit indices and check both output tags."""

        module = RemoveFramesModule(name_in='remove',
                                    image_in_tag='start',
                                    selected_out_tag='selected',
                                    removed_out_tag='removed',
                                    frames=[2, 5])

        self.pipeline.add_module(module)
        self.pipeline.run_module('remove')

        data = self.pipeline.get_data('selected')
        assert np.sum(data) == pytest.approx(43.68337741822863, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('removed')
        assert np.sum(data) == pytest.approx(20.759693057269445, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        attr = self.pipeline.get_attribute('selected', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(14., rel=self.limit, abs=0.)
        assert attr.shape == (4, )

        attr = self.pipeline.get_attribute('selected', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(40., rel=self.limit, abs=0.)
        assert attr.shape == (4, 2)

        attr = self.pipeline.get_attribute('removed', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(10., rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('removed', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(20., rel=self.limit, abs=0.)
        assert attr.shape == (2, 2)

    def test_frame_selection(self) -> None:
        """Select frames with the median, maximum, and range criteria."""

        # Median criterion with a circular photometry aperture
        module = FrameSelectionModule(name_in='select1',
                                      image_in_tag='start',
                                      selected_out_tag='selected1',
                                      removed_out_tag='removed1',
                                      index_out_tag='index1',
                                      method='median',
                                      threshold=2.,
                                      fwhm=0.1,
                                      aperture=('circular', 0.1),
                                      position=(None, None, 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('select1')

        data = self.pipeline.get_data('selected1')
        assert np.sum(data) == pytest.approx(54.58514780071149, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

        data = self.pipeline.get_data('removed1')
        assert np.sum(data) == pytest.approx(9.857922674786586, rel=self.limit, abs=0.)
        assert data.shape == (1, 11, 11)

        data = self.pipeline.get_data('index1')
        assert np.sum(data) == pytest.approx(5, rel=self.limit, abs=0.)
        assert data.shape == (1, )

        attr = self.pipeline.get_attribute('selected1', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(17., rel=self.limit, abs=0.)
        assert attr.shape == (5, )

        attr = self.pipeline.get_attribute('selected1', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(50, rel=self.limit, abs=0.)
        assert attr.shape == (5, 2)

        attr = self.pipeline.get_attribute('removed1', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(7., rel=self.limit, abs=0.)
        assert attr.shape == (1, )

        attr = self.pipeline.get_attribute('removed1', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (1, 2)

        # Maximum criterion with an annulus aperture
        module = FrameSelectionModule(name_in='select2',
                                      image_in_tag='start',
                                      selected_out_tag='selected2',
                                      removed_out_tag='removed2',
                                      index_out_tag='index2',
                                      method='max',
                                      threshold=1.,
                                      fwhm=0.1,
                                      aperture=('annulus', 0.05, 0.1),
                                      position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('select2')

        data = self.pipeline.get_data('selected2')
        assert np.sum(data) == pytest.approx(21.42652724866543, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        data = self.pipeline.get_data('removed2')
        assert np.sum(data) == pytest.approx(43.016543226832646, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('index2')
        assert np.sum(data) == pytest.approx(10, rel=self.limit, abs=0.)
        assert data.shape == (4, )

        attr = self.pipeline.get_attribute('selected2', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(8., rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('selected2', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(20, rel=self.limit, abs=0.)
        assert attr.shape == (2, 2)

        attr = self.pipeline.get_attribute('removed2', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(16., rel=self.limit, abs=0.)
        assert attr.shape == (4, )

        attr = self.pipeline.get_attribute('removed2', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(40, rel=self.limit, abs=0.)
        assert attr.shape == (4, 2)

        # Range criterion: keep frames with an aperture flux inside (10., 10.7)
        module = FrameSelectionModule(name_in='select3',
                                      image_in_tag='start',
                                      selected_out_tag='selected3',
                                      removed_out_tag='removed3',
                                      index_out_tag='index3',
                                      method='range',
                                      threshold=(10., 10.7),
                                      fwhm=0.1,
                                      aperture=('circular', 0.1),
                                      position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('select3')

        data = self.pipeline.get_data('selected3')
        assert np.sum(data) == pytest.approx(22.2568501695632, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        data = self.pipeline.get_data('removed3')
        assert np.sum(data) == pytest.approx(42.18622030593487, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('index3')
        assert np.sum(data) == pytest.approx(12, rel=self.limit, abs=0.)
        assert data.shape == (4, )

    def test_image_statistics_full(self) -> None:
        """Image statistics on the full frames."""

        module = ImageStatisticsModule(name_in='stat1',
                                       image_in_tag='read',
                                       stat_out_tag='stat1',
                                       position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat1')

        data = self.pipeline.get_data('stat1')
        assert np.sum(data) == pytest.approx(115.68591492205017, rel=self.limit, abs=0.)
        assert data.shape == (10, 6)

    def test_image_statistics_position(self) -> None:
        """Image statistics within an aperture at a fixed position."""
        # NOTE: renamed from the misspelled 'test_image_statistics_posiiton'

        module = ImageStatisticsModule(name_in='stat2',
                                       image_in_tag='read',
                                       stat_out_tag='stat2',
                                       position=(5, 5, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat2')

        data = self.pipeline.get_data('stat2')
        assert np.sum(data) == pytest.approx(118.7138708968444, rel=self.limit, abs=0.)
        assert data.shape == (10, 6)

    def test_frame_similarity_mse(self) -> None:
        """Frame similarity with the mean squared error metric."""

        module = FrameSimilarityModule(name_in='simi1',
                                       image_tag='read',
                                       method='MSE',
                                       mask_radius=(0., 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi1')

        attr = self.pipeline.get_attribute('read', 'MSE', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(0.11739141370277852, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_frame_similarity_pcc(self) -> None:
        """Frame similarity with the Pearson correlation coefficient metric."""

        module = FrameSimilarityModule(name_in='simi2',
                                       image_tag='read',
                                       method='PCC',
                                       mask_radius=(0., 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi2')

        attr = self.pipeline.get_attribute('read', 'PCC', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(9.134820985662829, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_frame_similarity_ssim(self) -> None:
        """Frame similarity with the structural similarity metric."""

        module = FrameSimilarityModule(name_in='simi3',
                                       image_tag='read',
                                       method='SSIM',
                                       mask_radius=(0., 0.2),
                                       temporal_median='constant')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi3')

        attr = self.pipeline.get_attribute('read', 'SSIM', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(9.096830542868524, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_select_by_attribute(self) -> None:
        """Select the frames with the highest SSIM values."""

        self.pipeline.set_attribute('read', 'INDEX', np.arange(44), static=False)

        module = SelectByAttributeModule(name_in='frame_removal_1',
                                         image_in_tag='read',
                                         attribute_tag='SSIM',
                                         number_frames=6,
                                         order='descending',
                                         selected_out_tag='select_sim',
                                         removed_out_tag='remove_sim')

        self.pipeline.add_module(module)
        self.pipeline.run_module('frame_removal_1')

        attr = self.pipeline.get_attribute('select_sim', 'INDEX', static=False)
        assert np.sum(attr) == pytest.approx(946, rel=self.limit, abs=0.)
        assert attr.shape == (44, )

        attr = self.pipeline.get_attribute('select_sim', 'SSIM', static=False)
        assert np.sum(attr) == pytest.approx(5.556889532446573, rel=self.limit, abs=0.)
        assert attr.shape == (6, )

        attr = self.pipeline.get_attribute('remove_sim', 'SSIM', static=False)
        assert np.sum(attr) == pytest.approx(3.539941010421951, rel=self.limit, abs=0.)
        assert attr.shape == (4, )

    def test_residual_selection(self) -> None:
        """Select frames based on the noise in an annulus of the residuals."""

        module = ResidualSelectionModule(name_in='residual_select',
                                         image_in_tag='start',
                                         selected_out_tag='res_selected',
                                         removed_out_tag='res_removed',
                                         percentage=80.,
                                         annulus_radii=(0.1, 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('residual_select')

        data = self.pipeline.get_data('res_selected')
        assert np.sum(data) == pytest.approx(41.77295229983322, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('res_removed')
        assert np.sum(data) == pytest.approx(22.670118175664847, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)
class TestFitsReadingModule:
    """Tests for FitsReadingModule: plain and overwriting reads, static and
    non-static header attributes (missing, changing, deleted), and reading
    an explicit list of files."""

    def setup_class(self):
        """Create a data set of FITS files and a pipeline in the test directory."""
        self.test_dir = os.path.dirname(__file__) + "/"
        create_star_data(path=self.test_dir + "fits")
        create_config(self.test_dir + "PynPoint_config.ini")
        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the data created by setup_class."""
        remove_test_data(self.test_dir, folders=["fits"])

    @staticmethod
    def _update_header(filename, cards):
        """Update cards in the primary header of a FITS file and write it back in place.

        Parameters
        ----------
        filename : str
            Path of the FITS file to modify.
        cards : dict
            Mapping of header keyword to the value to set.
        """
        with fits.open(filename) as hdu:
            header = hdu[0].header
            for key, value in cards.items():
                header[key] = value
            hdu.writeto(filename, overwrite=True)

    def test_fits_reading(self):
        """Read the FITS files without overwriting an existing tag."""

        read = FitsReadingModule(name_in="read1",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=False,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read1")

        data = self.pipeline.get_data("input")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_fits_reading_overwrite(self):
        """Read the same files again with overwrite=True; the data is unchanged."""

        read = FitsReadingModule(name_in="read2",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read2")

        data = self.pipeline.get_data("input")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_static_not_found(self):
        """A static attribute mapped to a missing header card warns for every file."""

        self.pipeline.set_attribute("config", "DIT", "ESO DET DIT", static=True)

        read = FitsReadingModule(name_in="read3",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read3")

        assert len(warning) == 4

        for item in warning:
            assert item.message.args[0] == "Static attribute DIT (=ESO DET DIT) not found in " \
                                           "the FITS header."

    def test_static_changing(self):
        """A static attribute that changes between files warns for each offending file."""

        # Two cubes with DIT = 0.1 followed by two cubes with DIT = 0.2
        for image, dit in [("image01", 0.1), ("image02", 0.1),
                           ("image03", 0.2), ("image04", 0.2)]:
            self._update_header(self.test_dir + "fits/" + image + ".fits",
                                {"HIERARCH ESO DET DIT": dit})

        read = FitsReadingModule(name_in="read4",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read4")

        assert len(warning) == 2

        assert warning[0].message.args[0] == "Static attribute ESO DET DIT has changed. " \
                                             "Possibly the current file image03.fits does " \
                                             "not belong to the data set 'input'. Attribute " \
                                             "value is updated."

        assert warning[1].message.args[0] == "Static attribute ESO DET DIT has changed. " \
                                             "Possibly the current file image04.fits does " \
                                             "not belong to the data set 'input'. Attribute " \
                                             "value is updated."

    def test_header_attribute(self):
        """Non-static attributes are read per cube without warnings when present."""

        self._update_header(self.test_dir + "fits/image01.fits", {"PARANG": 1.0})
        self._update_header(self.test_dir + "fits/image02.fits", {"PARANG": 2.0})
        # Make DIT consistent again so only PARANG varies between cubes
        self._update_header(self.test_dir + "fits/image03.fits",
                            {"PARANG": 3.0, "HIERARCH ESO DET DIT": 0.1})
        self._update_header(self.test_dir + "fits/image04.fits",
                            {"PARANG": 4.0, "HIERARCH ESO DET DIT": 0.1})

        read = FitsReadingModule(name_in="read5",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read5")

    def test_non_static_not_found(self):
        """Deleting a header card mapped to a non-static attribute warns per file."""

        self.pipeline.set_attribute("config", "DIT", "None", static=True)

        for i in range(1, 5):
            with fits.open(self.test_dir + "fits/image0" + str(i) + ".fits") as hdu:
                header = hdu[0].header
                del header['HIERARCH ESO DET DIT']
                del header['HIERARCH ESO DET EXP NO']
                hdu.writeto(self.test_dir + "fits/image0" + str(i) + ".fits", overwrite=True)

        read = FitsReadingModule(name_in="read6",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read6")

        assert len(warning) == 4

        for item in warning:
            assert item.message.args[0] == "Non-static attribute EXP_NO (=ESO DET EXP NO) not " \
                                           "found in the FITS header."

    def test_fits_read_files(self):
        """Read an explicit list of files instead of a whole input directory."""

        folder = os.path.dirname(os.path.abspath(__file__))

        read = FitsReadingModule(name_in="read7",
                                 input_dir=None,
                                 image_tag="files",
                                 overwrite=False,
                                 check=True,
                                 filenames=[
                                     folder + "/fits/image01.fits",
                                     folder + "/fits/image03.fits"
                                 ])

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read7")

        assert len(warning) == 2

        for item in warning:
            assert item.message.args[0] == "Non-static attribute EXP_NO (=ESO DET EXP NO) not " \
                                           "found in the FITS header."

        data = self.pipeline.get_data("files")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010032245393723324, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)
class TestHdf5WritingModule:
    """Tests for Hdf5WritingModule and Hdf5ReadingModule round trips."""

    def setup_class(self):
        # Build a pipeline over a random dataset in the test directory.
        self.test_dir = os.path.dirname(__file__) + '/'
        create_random(self.test_dir)
        create_config(self.test_dir + 'PynPoint_config.ini')
        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        # Remove the HDF5 file produced by the writing tests.
        remove_test_data(self.test_dir, files=['test.hdf5'])

    def test_hdf5_writing(self):
        """Write the 'images' tag to a fresh HDF5 file."""
        module = Hdf5WritingModule(file_name='test.hdf5',
                                   name_in='write1',
                                   output_dir=None,
                                   tag_dictionary={'images': 'data1'},
                                   keep_attributes=True,
                                   overwrite=True)
        self.pipeline.add_module(module)
        self.pipeline.run_module('write1')

    def test_no_data_tag(self):
        """A tag without data triggers exactly one UserWarning."""
        module = Hdf5WritingModule(file_name='test.hdf5',
                                   name_in='write2',
                                   output_dir=None,
                                   tag_dictionary={'empty': 'empty'},
                                   keep_attributes=True,
                                   overwrite=False)
        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('write2')

        assert len(warning) == 1
        expected = 'No data under the tag which is linked by the InputPort.'
        assert warning[0].message.args[0] == expected

    def test_overwrite_false(self):
        """Appending a second tag to an existing file must not fail."""
        module = Hdf5WritingModule(file_name='test.hdf5',
                                   name_in='write3',
                                   output_dir=None,
                                   tag_dictionary={'images': 'data2'},
                                   keep_attributes=True,
                                   overwrite=False)
        self.pipeline.add_module(module)
        self.pipeline.run_module('write3')

    def test_dictionary_none(self):
        """A None tag dictionary is accepted by the module."""
        module = Hdf5WritingModule(file_name='test.hdf5',
                                   name_in='write4',
                                   output_dir=None,
                                   tag_dictionary=None,
                                   keep_attributes=True,
                                   overwrite=False)
        self.pipeline.add_module(module)
        self.pipeline.run_module('write4')

    def test_hdf5_reading(self):
        """Read both written tags back and compare data and attributes."""
        module = Hdf5ReadingModule(name_in='read',
                                   input_filename='test.hdf5',
                                   input_dir=self.test_dir,
                                   tag_dictionary={'data1': 'data1',
                                                   'data2': 'data2'})
        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        first = self.pipeline.get_data('data1')
        second = self.pipeline.get_data('data2')
        original = self.pipeline.get_data('images')

        # Both round-tripped copies must match each other and the source.
        assert np.allclose(first, second, rtol=limit, atol=0.)
        assert np.allclose(second, original, rtol=limit, atol=0.)

        parang_src = self.pipeline.get_attribute('images', 'PARANG', static=False)
        parang_one = self.pipeline.get_attribute('data1', 'PARANG', static=False)
        parang_two = self.pipeline.get_attribute('data2', 'PARANG', static=False)

        # Non-static attributes survive the write/read round trip as well.
        assert np.allclose(parang_src, parang_one, rtol=limit, atol=0.)
        assert np.allclose(parang_one, parang_two, rtol=limit, atol=0.)
class TestTimeDenoising:
    """Tests for wavelet-based time denoising and time normalization.

    Test order matters: ``test_read_data`` must run first to create the
    'images' dataset, and the CPU attribute set in ``test_wavelet_denoising_
    cwt_dog`` is reset at the start of the morlet test.
    """

    def setup_class(self):
        # Four 10-frame cubes of 20x20 pixels with a star at the center.
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir+'images',
                         npix_x=20,
                         npix_y=20,
                         x0=[10, 10, 10, 10],
                         y0=[10, 10, 10, 10],
                         parang_start=[0., 25., 50., 75.],
                         parang_end=[25., 50., 75., 100.],
                         exp_no=[1, 2, 3, 4])

        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        remove_test_data(self.test_dir, folders=['images'])

    def test_read_data(self):
        """Ingest the synthetic cubes and pin the regression references."""
        module = FitsReadingModule(name_in='read',
                                   image_tag='images',
                                   input_dir=self.test_dir+'images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 10, 10], 0.09799496683489618, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0025020285041348557, rtol=limit, atol=0.)
        assert data.shape == (40, 20, 20)

    def test_wavelet_denoising_cwt_dog(self):
        """CWT denoising with a DOG wavelet; also verify that running with
        CPU=4 (multiprocessing) reproduces the single-process result."""
        cwt_config = CwtWaveletConfiguration(wavelet='dog',
                                             wavelet_order=2,
                                             keep_mean=False,
                                             resolution=0.5)

        # The configuration object stores the constructor arguments verbatim.
        assert cwt_config.m_wavelet == 'dog'
        assert np.allclose(cwt_config.m_wavelet_order, 2, rtol=limit, atol=0.)
        assert not cwt_config.m_keep_mean
        assert np.allclose(cwt_config.m_resolution, 0.5, rtol=limit, atol=0.)

        module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config,
                                            name_in='wavelet_cwt_dog',
                                            image_in_tag='images',
                                            image_out_tag='wavelet_cwt_dog',
                                            padding='zero',
                                            median_filter=True,
                                            threshold_function='soft')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_cwt_dog')

        data = self.pipeline.get_data('wavelet_cwt_dog')
        assert np.allclose(data[0, 10, 10], 0.09805577173716859, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.002502083112599873, rtol=limit, atol=0.)
        assert data.shape == (40, 20, 20)

        # Switch the pipeline to 4 worker processes and rerun the same module.
        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        self.pipeline.run_module('wavelet_cwt_dog')

        # Multiprocessing output must be identical to the serial output.
        data_multi = self.pipeline.get_data('wavelet_cwt_dog')
        assert np.allclose(data, data_multi, rtol=limit, atol=0.)
        assert data.shape == data_multi.shape

    def test_wavelet_denoising_cwt_morlet(self):
        """CWT denoising with a Morlet wavelet (mirror padding, hard
        threshold, no median filter)."""
        # Restore single-process execution after the previous test.
        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        cwt_config = CwtWaveletConfiguration(wavelet='morlet',
                                             wavelet_order=5,
                                             keep_mean=False,
                                             resolution=0.5)

        assert cwt_config.m_wavelet == 'morlet'
        assert np.allclose(cwt_config.m_wavelet_order, 5, rtol=limit, atol=0.)
        assert not cwt_config.m_keep_mean
        assert np.allclose(cwt_config.m_resolution, 0.5, rtol=limit, atol=0.)

        module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config,
                                            name_in='wavelet_cwt_morlet',
                                            image_in_tag='images',
                                            image_out_tag='wavelet_cwt_morlet',
                                            padding='mirror',
                                            median_filter=False,
                                            threshold_function='hard')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_cwt_morlet')

        data = self.pipeline.get_data('wavelet_cwt_morlet')
        # NOTE(review): this pixel reference is identical to the CWT-DOG value
        # above — possibly a copy-paste; confirm against a fresh run.
        assert np.allclose(data[0, 10, 10], 0.09805577173716859, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0025019409784314286, rtol=limit, atol=0.)
        assert data.shape == (40, 20, 20)

        # NFRAMES attribute is copied through: four cubes of 10 frames each.
        data = self.pipeline.get_attribute('wavelet_cwt_morlet', 'NFRAMES', static=False)
        assert np.allclose(data, [10, 10, 10, 10], rtol=limit, atol=0.)

    def test_wavelet_denoising_dwt(self):
        """Discrete wavelet transform denoising with a Daubechies-8 wavelet."""
        dwt_config = DwtWaveletConfiguration(wavelet='db8')

        assert dwt_config.m_wavelet == 'db8'

        module = WaveletTimeDenoisingModule(wavelet_configuration=dwt_config,
                                            name_in='wavelet_dwt',
                                            image_in_tag='images',
                                            image_out_tag='wavelet_dwt',
                                            padding='zero',
                                            median_filter=True,
                                            threshold_function='soft')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_dwt')

        data = self.pipeline.get_data('wavelet_dwt')
        assert np.allclose(data[0, 10, 10], 0.09650639476873678, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0024998798596330475, rtol=limit, atol=0.)
        assert data.shape == (40, 20, 20)

    def test_time_normalization(self):
        """Normalize each frame along the time axis and pin the references."""
        module = TimeNormalizationModule(name_in='timenorm',
                                         image_in_tag='images',
                                         image_out_tag='timenorm')

        self.pipeline.add_module(module)
        self.pipeline.run_module('timenorm')

        data = self.pipeline.get_data('timenorm')
        assert np.allclose(data[0, 10, 10], 0.09793500165714215, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0024483409033199985, rtol=limit, atol=0.)
        assert data.shape == (40, 20, 20)

    def test_wavelet_denoising_odd_size(self):
        """Wavelet denoising on odd-sized (21x21) images with both zero and
        mirror padding; NFRAMES must be preserved in all outputs."""
        # Pad one row and one column to make the image size odd.
        module = AddLinesModule(name_in='add',
                                image_in_tag='images',
                                image_out_tag='images_odd',
                                lines=(1, 0, 1, 0))

        self.pipeline.add_module(module)
        self.pipeline.run_module('add')

        data = self.pipeline.get_data('images_odd')
        assert np.allclose(data[0, 10, 10], 0.05294085050174391, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.002269413609192613, rtol=limit, atol=0.)
        assert data.shape == (40, 21, 21)

        cwt_config = CwtWaveletConfiguration(wavelet='dog',
                                             wavelet_order=2,
                                             keep_mean=False,
                                             resolution=0.5)

        assert cwt_config.m_wavelet == 'dog'
        assert np.allclose(cwt_config.m_wavelet_order, 2, rtol=limit, atol=0.)
        assert not cwt_config.m_keep_mean
        assert np.allclose(cwt_config.m_resolution, 0.5, rtol=limit, atol=0.)

        # Zero padding variant.
        module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config,
                                            name_in='wavelet_odd_1',
                                            image_in_tag='images_odd',
                                            image_out_tag='wavelet_odd_1',
                                            padding='zero',
                                            median_filter=True,
                                            threshold_function='soft')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_odd_1')

        data = self.pipeline.get_data('wavelet_odd_1')
        assert np.allclose(data[0, 10, 10], 0.0529782051386938, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0022694631406801565, rtol=limit, atol=0.)
        assert data.shape == (40, 21, 21)

        # Mirror padding variant.
        module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config,
                                            name_in='wavelet_odd_2',
                                            image_in_tag='images_odd',
                                            image_out_tag='wavelet_odd_2',
                                            padding='mirror',
                                            median_filter=True,
                                            threshold_function='soft')

        self.pipeline.add_module(module)
        self.pipeline.run_module('wavelet_odd_2')

        data = self.pipeline.get_data('wavelet_odd_2')
        assert np.allclose(data[0, 10, 10], 0.05297146283932275, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0022694809842930034, rtol=limit, atol=0.)
        assert data.shape == (40, 21, 21)

        # NFRAMES is carried unchanged from the input to both outputs.
        data = self.pipeline.get_attribute('images', 'NFRAMES', static=False)
        assert np.allclose(data, [10, 10, 10, 10], rtol=limit, atol=0.)

        data = self.pipeline.get_attribute('wavelet_odd_1', 'NFRAMES', static=False)
        assert np.allclose(data, [10, 10, 10, 10], rtol=limit, atol=0.)

        data = self.pipeline.get_attribute('wavelet_odd_2', 'NFRAMES', static=False)
        assert np.allclose(data, [10, 10, 10, 10], rtol=limit, atol=0.)
class TestPypeline:
    """Tests for core Pypeline behavior: output-port naming and
    apply_function_to_images with 2D/3D data, shared ports, and the
    MEMORY setting.

    Fix over the original: the HDF5 database in ``setup_class`` is now
    created inside a ``with`` block instead of an unprotected
    open/close pair, so the file handle cannot leak if a
    ``create_dataset`` call raises. The legacy ``(object)`` base was also
    dropped for consistency with the other test classes in this file.
    """

    def setup_class(self):
        self.test_dir = os.path.dirname(__file__) + "/"

        # Fixed seed -> deterministic regression values below.
        np.random.seed(1)

        image_3d = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        image_2d = np.random.normal(loc=0, scale=2e-4, size=(10, 10))
        science = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        dark = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        # Context manager guarantees the database file is closed even if a
        # dataset creation fails (original used a bare open/close pair).
        with h5py.File(self.test_dir + "PynPoint_database.hdf5", "w") as h5f:
            h5f.create_dataset("image_3d", data=image_3d)
            h5f.create_dataset("image_2d", data=image_2d)
            h5f.create_dataset("science", data=science)
            h5f.create_dataset("dark", data=dark)

        create_star_data(path=self.test_dir + "images")
        create_config(self.test_dir + "PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        remove_test_data(self.test_dir, folders=["images"])

    def test_output_port_name(self):
        """Adding the same output-port tag twice warns for both reading and
        processing modules; adding a new tag after reconnecting the
        database must succeed."""
        read = FitsReadingModule(name_in="read",
                                 input_dir=self.test_dir + "images",
                                 image_tag="images")
        read.add_output_port("test")

        with pytest.warns(UserWarning) as warning:
            read.add_output_port("test")

        assert len(warning) == 1
        assert warning[0].message.args[0] == "Tag 'test' of ReadingModule 'read' is already used."

        process = BadPixelSigmaFilterModule(name_in="badpixel",
                                            image_in_tag="images")
        process.add_output_port("test")

        with pytest.warns(UserWarning) as warning:
            process.add_output_port("test")

        assert len(warning) == 1
        assert warning[0].message.args[0] == "Tag 'test' of ProcessingModule 'badpixel' is " \
                                             "already used."

        # Point the module at a fresh database and add a new, unused tag.
        self.pipeline.m_data_storage.close_connection()
        process._m_data_base = self.test_dir + "database.hdf5"
        process.add_output_port("new")

    def test_apply_function_to_images_3d(self):
        """Remove one image row from a 3D cube with MEMORY=1 (chunked)."""
        self.pipeline.set_attribute("config", "MEMORY", 1, static=True)

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in="remove1",
                                   image_in_tag="image_3d",
                                   image_out_tag="remove_3d")

        self.pipeline.add_module(remove)
        self.pipeline.run_module("remove1")

        # Input is untouched...
        data = self.pipeline.get_data("image_3d")
        assert np.allclose(np.mean(data), 1.0141852764605783e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        # ...output lost one column of each frame (10 -> 9).
        data = self.pipeline.get_data("remove_3d")
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_2d(self):
        """Same line removal applied to a single 2D image."""
        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in="remove2",
                                   image_in_tag="image_2d",
                                   image_out_tag="remove_2d")

        self.pipeline.add_module(remove)
        self.pipeline.run_module("remove2")

        data = self.pipeline.get_data("image_2d")
        assert np.allclose(np.mean(data), 1.2869483197883442e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 10)

        data = self.pipeline.get_data("remove_2d")
        assert np.allclose(np.mean(data), 1.3957075246029751e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 9)

    def test_apply_function_to_images_same_port(self):
        """In-place processing (input tag == output tag) works when the image
        shape is unchanged, with both MEMORY=1 and MEMORY=None; a
        shape-changing function on a shared port must raise ValueError."""
        dark = DarkCalibrationModule(name_in="dark1",
                                     image_in_tag="science",
                                     dark_in_tag="dark",
                                     image_out_tag="science")

        self.pipeline.add_module(dark)
        self.pipeline.run_module("dark1")

        data = self.pipeline.get_data("science")
        assert np.allclose(np.mean(data), -3.190113568690675e-06, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        # MEMORY=0 maps to None (process everything at once).
        self.pipeline.set_attribute("config", "MEMORY", 0, static=True)

        dark = DarkCalibrationModule(name_in="dark2",
                                     image_in_tag="science",
                                     dark_in_tag="dark",
                                     image_out_tag="science")

        self.pipeline.add_module(dark)
        self.pipeline.run_module("dark2")

        data = self.pipeline.get_data("science")
        assert np.allclose(np.mean(data), -1.026073475228737e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        # Shape-changing function on a shared port is only allowed with
        # MEMORY=None; here it must fail.
        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in="remove3",
                                   image_in_tag="remove_3d",
                                   image_out_tag="remove_3d")

        self.pipeline.add_module(remove)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module("remove3")

        assert str(error.value) == "Input and output port have the same tag while the input " \
                                   "function is changing the image shape. This is only " \
                                   "possible with MEMORY=None."

    def test_apply_function_to_images_memory_none(self):
        """Shape-changing function with MEMORY=None and distinct ports."""
        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in="remove4",
                                   image_in_tag="image_3d",
                                   image_out_tag="remove_3d_none")

        self.pipeline.add_module(remove)
        self.pipeline.run_module("remove4")

        data = self.pipeline.get_data("remove_3d_none")
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_3d_args(self):
        """Image scaling with extra function arguments on a 3D cube; the
        PIXSCALE attribute is rescaled by the spatial factor (0.1/1.2)."""
        self.pipeline.set_attribute("config", "MEMORY", 1, static=True)
        self.pipeline.set_attribute("image_3d", "PIXSCALE", 0.1, static=True)

        scale = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                  pixscale=True,
                                  name_in="scale1",
                                  image_in_tag="image_3d",
                                  image_out_tag="scale_3d")

        self.pipeline.add_module(scale)
        self.pipeline.run_module("scale1")

        data = self.pipeline.get_data("scale_3d")
        assert np.allclose(np.mean(data), 7.042953308754017e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 12, 12)

        attribute = self.pipeline.get_attribute("scale_3d", "PIXSCALE", static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)

    def test_apply_function_to_images_2d_args(self):
        """Same scaling test on a single 2D image."""
        self.pipeline.set_attribute("image_2d", "PIXSCALE", 0.1, static=True)

        scale = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                  pixscale=True,
                                  name_in="scale2",
                                  image_in_tag="image_2d",
                                  image_out_tag="scale_2d")

        self.pipeline.add_module(scale)
        self.pipeline.run_module("scale2")

        data = self.pipeline.get_data("scale_2d")
        assert np.allclose(np.mean(data), 8.937141109641279e-05, rtol=limit, atol=0.)
        assert data.shape == (12, 12)

        attribute = self.pipeline.get_attribute("scale_2d", "PIXSCALE", static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)
class TestPSFpreparation(object):
    """Tests for PSF preparation, parallactic-angle interpolation and
    calculation, and SDI preparation.

    Test order matters: ``test_read_data`` creates the 'read' dataset that
    every later test reuses, and the angle tests mutate its attributes.
    """

    def setup_class(self):
        self.test_dir = os.path.dirname(__file__) + "/"

        create_star_data(path=self.test_dir + "prep")
        create_config(self.test_dir + "PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        remove_test_data(self.test_dir, folders=["prep"])

    def test_read_data(self):
        """Ingest the synthetic star data and pin the regression values."""
        read = FitsReadingModule(name_in="read",
                                 image_tag="read",
                                 input_dir=self.test_dir + "prep")

        self.pipeline.add_module(read)
        self.pipeline.run_module("read")

        data = self.pipeline.get_data("read")
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_angle_interpolation(self):
        """Linear interpolation of PARANG between cube start/end angles."""
        angle = AngleInterpolationModule(name_in="angle1",
                                         data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle1")

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 7.777777777777778, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_angle_calculation(self):
        """PARANG computed from telescope attributes for NACO, SPHERE/IRDIS,
        and SPHERE/IFS (the latter two emitting warnings)."""
        # Observatory location and integration time for the calculation.
        self.pipeline.set_attribute("read", "LATITUDE", -25.)
        self.pipeline.set_attribute("read", "LONGITUDE", -70.)
        self.pipeline.set_attribute("read", "DIT", 1.)

        # Per-cube target coordinates, pupil offset, and observation dates.
        self.pipeline.set_attribute("read", "RA", (90., 90., 90., 90.), static=False)
        self.pipeline.set_attribute("read", "DEC", (-51., -51., -51., -51.), static=False)
        self.pipeline.set_attribute("read", "PUPIL", (90., 90., 90., 90.), static=False)

        date = ("2012-12-01T07:09:00.0000", "2012-12-01T07:09:01.0000",
                "2012-12-01T07:09:02.0000", "2012-12-01T07:09:03.0000")

        self.pipeline.set_attribute("read", "DATE", date, static=False)

        angle = AngleCalculationModule(instrument="NACO",
                                       name_in="angle2",
                                       data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle2")

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], -55.041097524594186, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), -54.99858342139904, rtol=limit, atol=0.)
        assert data.shape == (40, )

        # SPHERE expects RA/DEC in HHMMSS.S / DDMMSS.S format.
        self.pipeline.set_attribute("read", "RA", (60000.0, 60000.0, 60000.0, 60000.0),
                                    static=False)

        self.pipeline.set_attribute("read", "DEC", (-510000., -510000., -510000., -510000.),
                                    static=False)

        angle = AngleCalculationModule(instrument="SPHERE/IRDIS",
                                       name_in="angle3",
                                       data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle3")

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], 170.39102733657813, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 170.46341141667205, rtol=limit, atol=0.)
        assert data.shape == (40, )

        angle = AngleCalculationModule(instrument="SPHERE/IFS",
                                       name_in="angle4",
                                       data_tag="read")

        self.pipeline.add_module(angle)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("angle4")

        assert len(warning) == 2

        assert warning[0].message.args[0] == "AngleCalculationModule has not been tested for " \
                                             "SPHERE/IFS data."

        assert warning[1].message.args[0] == "For SPHERE data it is recommended to use the " \
                                             "header keywords \"ESO INS4 DROT2 RA/DEC\" to " \
                                             "specify the object's position. The input will be " \
                                             "parsed accordingly. Using the regular RA/DEC " \
                                             "parameters will lead to wrong parallactic angles."

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], -89.12897266342185, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), -89.02755900320116, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_angle_interpolation_mismatch(self):
        """An NDIT/NFRAMES mismatch triggers a warning but interpolation
        still proceeds with NFRAMES steps."""
        self.pipeline.set_attribute("read", "NDIT", [9, 9, 9, 9], static=False)

        angle = AngleInterpolationModule(name_in="angle5",
                                         data_tag="read")

        self.pipeline.add_module(angle)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("angle5")

        assert len(warning) == 1

        assert warning[0].message.args[0] == "There is a mismatch between the NDIT and NFRAMES " \
                                             "values. The derotation angles are calculated with " \
                                             "a linear interpolation by using NFRAMES steps. A " \
                                             "frame selection should be applied after the " \
                                             "derotation angles are calculated."

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 7.777777777777778, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_psf_preparation_norm_mask(self):
        """Normalization plus central/edge masking; masked corners are zero."""
        prep = PSFpreparationModule(name_in="prep1",
                                    image_in_tag="read",
                                    image_out_tag="prep1",
                                    mask_out_tag="mask1",
                                    norm=True,
                                    cent_size=0.1,
                                    edge_size=1.0)

        self.pipeline.add_module(prep)
        self.pipeline.run_module("prep1")

        data = self.pipeline.get_data("prep1")
        assert np.allclose(data[0, 0, 0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0., rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0001690382058762809, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        # The mask itself is a single 2D frame of zeros and ones.
        data = self.pipeline.get_data("mask1")
        assert np.allclose(data[0, 0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[99, 99], 0., rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.4268, rtol=limit, atol=0.)
        assert data.shape == (100, 100)

    def test_psf_preparation_none(self):
        """No normalization and no masking: output equals the input."""
        prep = PSFpreparationModule(name_in="prep2",
                                    image_in_tag="read",
                                    image_out_tag="prep2",
                                    mask_out_tag="mask2",
                                    norm=False,
                                    cent_size=None,
                                    edge_size=None)

        self.pipeline.add_module(prep)
        self.pipeline.run_module("prep2")

        data = self.pipeline.get_data("prep2")
        assert np.allclose(data[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], -0.000287573978535779, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_psf_preparation_no_mask_out(self):
        """Same as above but without writing a mask output tag."""
        prep = PSFpreparationModule(name_in="prep3",
                                    image_in_tag="read",
                                    image_out_tag="prep3",
                                    mask_out_tag=None,
                                    norm=False,
                                    cent_size=None,
                                    edge_size=None)

        self.pipeline.add_module(prep)
        self.pipeline.run_module("prep3")

        data = self.pipeline.get_data("prep3")
        assert np.allclose(data[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], -0.000287573978535779, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_sdi_preparation(self):
        """SDI rescaling between two wavelengths; the history attribute
        records the (line, continuum) pair."""
        sdi = SDIpreparationModule(name_in="sdi",
                                   wavelength=(0.65, 0.6),
                                   width=(0.1, 0.5),
                                   image_in_tag="read",
                                   image_out_tag="sdi")

        self.pipeline.add_module(sdi)
        self.pipeline.run_module("sdi")

        data = self.pipeline.get_data("sdi")
        assert np.allclose(data[0, 25, 25], -2.6648118007008814e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 2.0042892634995876e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        attribute = self.pipeline.get_attribute("sdi", "History: SDIpreparationModule")
        assert attribute == "(line, continuum) = (0.65, 0.6)"
class TestNearInitModule(object):
    """Tests for NearReadingModule (VISIR/NEAR chop-nod data).

    Several tests rewrite the headers of ``self.fitsfile`` in place and rely
    on the class-level test order to restore or further mutate them.
    """

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + '/'
        # File whose headers are mutated by the header/frame-type tests below.
        self.fitsfile = self.test_dir + 'near/images_1.fits'

        create_near_data(path=self.test_dir + 'near')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

        # Map the config attributes to the NEAR-specific FITS keywords;
        # 'None' disables the corresponding lookup.
        self.pipeline.set_attribute('config', 'NFRAMES', 'ESO DET CHOP NCYCLES', static=True)
        self.pipeline.set_attribute('config', 'EXP_NO', 'ESO TPL EXPNO', static=True)
        self.pipeline.set_attribute('config', 'NDIT', 'None', static=True)
        self.pipeline.set_attribute('config', 'PARANG_START', 'None', static=True)
        self.pipeline.set_attribute('config', 'PARANG_END', 'None', static=True)
        self.pipeline.set_attribute('config', 'DITHER_X', 'None', static=True)
        self.pipeline.set_attribute('config', 'DITHER_Y', 'None', static=True)
        self.pipeline.set_attribute('config', 'PIXSCALE', 0.045, static=True)
        self.pipeline.set_attribute('config', 'MEMORY', 100, static=True)

        # Output tags for the two chop positions (A, B).
        self.positions = ('chopa', 'chopb')

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['near'])

    def test_near_read(self) -> None:
        """Plain read: both chop positions get 20 frames of 10x10 pixels."""
        module = NearReadingModule(name_in='read1a',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1a')

        for item in self.positions:
            data = self.pipeline.get_data(item)
            assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
            assert data.shape == (20, 10, 10)

    def test_near_subtract_crop_mean(self) -> None:
        """Chop subtraction, cropping to 0.3 arcsec, and mean combination:
        subtraction of identical chop positions yields zero-mean data."""
        module = NearReadingModule(name_in='read1b',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1],
                                   subtract=True,
                                   crop=(None, None, 0.3),
                                   combine='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1b')

        data = self.pipeline.get_data(self.positions[0])
        assert np.allclose(np.mean(data), 0.0, rtol=limit, atol=0.)
        assert data.shape == (4, 7, 7)

        data = self.pipeline.get_data(self.positions[1])
        assert np.allclose(np.mean(data), 0.0, rtol=limit, atol=0.)
        assert data.shape == (4, 7, 7)

    def test_near_median(self) -> None:
        """Median combination collapses each cube to one frame per cycle."""
        module = NearReadingModule(name_in='read1c',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1],
                                   combine='median')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1c')

        data = self.pipeline.get_data(self.positions[0])
        assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        data = self.pipeline.get_data(self.positions[1])
        assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

    def test_static_not_found(self) -> None:
        """A bogus static keyword mapping warns once per HDU (8 total)."""
        self.pipeline.set_attribute('config', 'DIT', 'Test', static=True)

        module = NearReadingModule(name_in='read2',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read2')

        assert len(warning) == 8

        for item in warning:
            assert item.message.args[0] == 'Static attribute DIT (=Test) not found in the FITS ' \
                                           'header.'

        # Restore the valid DIT keyword for subsequent tests.
        self.pipeline.set_attribute('config', 'DIT', 'ESO DET SEQ1 DIT', static=True)

    def test_nonstatic_not_found(self) -> None:
        """Same for a bogus non-static keyword mapping (NDIT)."""
        self.pipeline.set_attribute('config', 'NDIT', 'Test', static=True)

        module = NearReadingModule(name_in='read3',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read3')

        assert len(warning) == 8

        for item in warning:
            assert item.message.args[0] == 'Non-static attribute NDIT (=Test) not found in the ' \
                                           'FITS header.'

        # Disable the NDIT lookup again.
        self.pipeline.set_attribute('config', 'NDIT', 'None', static=True)

    def test_check_header(self) -> None:
        """Chopping-status header checks each produce a dedicated warning."""
        # Flip the chop-status keywords to their "bad" values.
        with fits.open(self.fitsfile) as hdulist:
            hdulist[0].header['ESO DET CHOP ST'] = 'F'
            hdulist[0].header['ESO DET CHOP CYCSKIP'] = 1
            hdulist[0].header['ESO DET CHOP CYCSUM'] = 'T'
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read4',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read4')

        assert len(warning) == 3

        assert warning[0].message.args[0] == 'Dataset was obtained without chopping.'
        assert warning[1].message.args[0] == 'Chop cycles (1) have been skipped.'
        assert warning[2].message.args[0] == 'FITS file contains averaged images.'

        # Restore the original keyword values for the remaining tests.
        with fits.open(self.fitsfile) as hdulist:
            hdulist[0].header['ESO DET CHOP ST'] = 'T'
            hdulist[0].header['ESO DET CHOP CYCSKIP'] = 0
            hdulist[0].header['ESO DET CHOP CYCSUM'] = 'F'
            hdulist.writeto(self.fitsfile, overwrite=True)

    def test_frame_type_invalid(self) -> None:
        """An unrecognized frame type raises ValueError."""
        with fits.open(self.fitsfile) as hdulist:
            hdulist[10].header['ESO DET FRAM TYPE'] = 'Test'
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read5',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('read5')

        assert str(error.value) == 'Frame type (Test) not a valid value. Expecting HCYCLE1 or ' \
                                   'HCYCLE2 as value for ESO DET FRAM TYPE.'

    def test_frame_type_missing(self) -> None:
        """A missing frame-type keyword raises ValueError with the HDU index."""
        with fits.open(self.fitsfile) as hdulist:
            hdulist[10].header.remove('ESO DET FRAM TYPE')
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read6',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('read6')

        assert str(error.value) == 'Frame type not found in the FITS header. Image number: 9.'

    def test_same_cycle(self) -> None:
        """Two consecutive images with the same chop position: the duplicate
        is skipped and the A/B counts end up unequal."""
        with fits.open(self.fitsfile) as hdulist:

            # astropy itself warns about the long HIERARCH keyword.
            with pytest.warns(UserWarning) as warning:
                hdulist[10].header['ESO DET FRAM TYPE'] = 'HCYCLE1'

            assert len(warning) == 1
            assert warning[0].message.args[0] == 'Keyword name \'ESO DET FRAM TYPE\' is greater ' \
                                                 'than 8 characters or contains characters not ' \
                                                 'allowed by the FITS standard; a HIERARCH card ' \
                                                 'will be created.'

            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read7',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read7')

        assert len(warning) == 2

        assert warning[0].message.args[0] == 'Previous and current chop position (HCYCLE1) are ' \
                                             'the same. Skipping the current image.'

        assert warning[1].message.args[0] == 'The number of images is not equal for chop A and ' \
                                             'chop B.'

    def test_odd_number_images(self) -> None:
        """Deleting one HDU leaves an odd image count, which warns about the
        count and the resulting cycle/image mismatch."""
        with fits.open(self.fitsfile) as hdulist:
            del hdulist[11]
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read8',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read8')

        assert len(warning) == 2

        assert warning[0].message.args[0] == f'FITS file contains odd number of images: ' \
                                             f'{self.fitsfile}'

        assert warning[1].message.args[0] == 'The number of chop cycles (5) is not equal to ' \
                                             'half the number of available HDU images (4).'
class TestStackSubset:
    """Regression tests for the stacking/subset modules. The test methods
    share one pipeline database and run in declaration order; later tests
    rely on tags created by earlier ones.
    """

    def setup_class(self) -> None:
        # Create two identical synthetic data sets plus a config file next
        # to this test module.
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'data')
        create_star_data(path=self.test_dir + 'extra')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        # Remove the generated data folders and pipeline files.
        remove_test_data(self.test_dir, folders=['data', 'extra'])

    def test_read_data(self) -> None:
        """Read both data sets and check that they are identical."""

        read = FitsReadingModule(name_in='read1',
                                 image_tag='images',
                                 input_dir=self.test_dir + 'data',
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('images')
        # Regression values for the synthetic star data.
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        read = FitsReadingModule(name_in='read2',
                                 image_tag='extra',
                                 input_dir=self.test_dir + 'extra',
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module('read2')

        extra = self.pipeline.get_data('extra')
        # Both folders were created with the same generator, so the data
        # sets must match element-wise.
        assert np.allclose(data, extra, rtol=limit, atol=0.)

    def test_stack_and_subset(self) -> None:
        """Stack pairs of images and draw a random subset of 10 frames."""

        self.pipeline.set_attribute('images', 'PARANG', np.arange(1., 41., 1.), static=False)

        stack = StackAndSubsetModule(name_in='stack',
                                     image_in_tag='images',
                                     image_out_tag='stack',
                                     random=10,
                                     stacking=2)

        self.pipeline.add_module(stack)
        self.pipeline.run_module('stack')

        data = self.pipeline.get_data('stack')
        assert np.allclose(data[0, 50, 50], 0.09816320034649725, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.983545774937238e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

        data = self.pipeline.get_data('header_stack/INDEX')
        index = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        assert np.allclose(data, index, rtol=limit, atol=0.)
        assert data.shape == (10, )

        data = self.pipeline.get_data('header_stack/PARANG')
        # Expected mean angles of the randomly selected stacked pairs
        # (deterministic because the module's random seed is fixed —
        # presumably set inside StackAndSubsetModule; verify there).
        parang = [1.5, 15.5, 19.5, 23.5, 25.5, 29.5, 31.5, 35.5, 37.5, 39.5]
        assert np.allclose(data, parang, rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_mean_cube(self) -> None:
        """MeanCubeModule still works but emits a deprecation warning."""

        with pytest.warns(DeprecationWarning) as warning:
            mean = MeanCubeModule(name_in='mean',
                                  image_in_tag='images',
                                  image_out_tag='mean')

        assert len(warning) == 1

        # NOTE: 'will be be' (sic) matches the module's actual warning text.
        assert warning[0].message.args[0] == 'The MeanCubeModule will be be deprecated in a ' \
                                             'future release. Please use the StackCubesModule ' \
                                             'instead.'

        self.pipeline.add_module(mean)
        self.pipeline.run_module('mean')

        data = self.pipeline.get_data('mean')
        assert np.allclose(data[0, 50, 50], 0.09805840100024205, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738069, rtol=limit, atol=0.)
        # 40 input frames collapse to one mean image per cube (4 cubes).
        assert data.shape == (4, 100, 100)

        attribute = self.pipeline.get_attribute('mean', 'INDEX', static=False)
        assert np.allclose(np.mean(attribute), 1.5, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        attribute = self.pipeline.get_attribute('mean', 'NFRAMES', static=False)
        assert np.allclose(np.mean(attribute), 1, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

    def test_stack_cube(self) -> None:
        """StackCubesModule with combine='mean' must reproduce the exact
        output of the deprecated MeanCubeModule above.
        """

        module = StackCubesModule(name_in='stackcube',
                                  image_in_tag='images',
                                  image_out_tag='mean',
                                  combine='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('stackcube')

        data = self.pipeline.get_data('mean')
        assert np.allclose(data[0, 50, 50], 0.09805840100024205, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738069, rtol=limit, atol=0.)
        assert data.shape == (4, 100, 100)

        attribute = self.pipeline.get_attribute('mean', 'INDEX', static=False)
        assert np.allclose(np.mean(attribute), 1.5, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        attribute = self.pipeline.get_attribute('mean', 'NFRAMES', static=False)
        assert np.allclose(np.mean(attribute), 1, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

    def test_derotate_and_stack(self) -> None:
        """Derotate-and-mean and no-derotation-median both collapse the
        stack to a single image.
        """

        derotate = DerotateAndStackModule(name_in='derotate1',
                                          image_in_tag='images',
                                          image_out_tag='derotate1',
                                          derotate=True,
                                          stack='mean',
                                          extra_rot=10.)

        self.pipeline.add_module(derotate)
        self.pipeline.run_module('derotate1')

        data = self.pipeline.get_data('derotate1')
        assert np.allclose(data[0, 50, 50], 0.09689679769268554, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010021671152246617, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

        derotate = DerotateAndStackModule(name_in='derotate2',
                                          image_in_tag='images',
                                          image_out_tag='derotate2',
                                          derotate=False,
                                          stack='median',
                                          extra_rot=0.)

        self.pipeline.add_module(derotate)
        self.pipeline.run_module('derotate2')

        data = self.pipeline.get_data('derotate2')
        assert np.allclose(data[0, 50, 50], 0.09809001768003645, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010033064394962, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_combine_tags(self) -> None:
        """Combining two tags doubles the stack; with check_attr=True a
        warning flags the conflicting non-static FILES attribute.
        """

        combine = CombineTagsModule(image_in_tags=('images', 'extra'),
                                    check_attr=True,
                                    index_init=False,
                                    name_in='combine1',
                                    image_out_tag='combine1')

        self.pipeline.add_module(combine)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('combine1')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'The non-static keyword FILES is already used but ' \
                                             'with different values. It is advisable to only ' \
                                             'combine tags that descend from the same data set.'

        data = self.pipeline.get_data('combine1')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738068, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('header_combine1/INDEX')
        # index_init=False keeps the original per-tag indices, so the
        # second data set starts again at 0.
        assert data[40] == 0
        assert data.shape == (80, )

        combine = CombineTagsModule(image_in_tags=('images', 'extra'),
                                    check_attr=False,
                                    index_init=True,
                                    name_in='combine2',
                                    image_out_tag='combine2')

        self.pipeline.add_module(combine)
        self.pipeline.run_module('combine2')

        data = self.pipeline.get_data('combine1')
        extra = self.pipeline.get_data('combine2')
        assert np.allclose(data, extra, rtol=limit, atol=0.)

        data = self.pipeline.get_data('header_combine2/INDEX')
        # index_init=True renumbers frames continuously across tags.
        assert data[40] == 40
        assert data.shape == (80, )
class TestExtract:
    """Regression tests for StarExtractionModule and ExtractBinaryModule.
    Methods share one pipeline database and run in declaration order.
    """

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + '/'

        # Off-center star (at pixel 10, 10) on a small 51x51 grid.
        create_star_data(path=self.test_dir + 'star',
                         npix_x=51,
                         npix_y=51,
                         x0=[10., 10., 10., 10.],
                         y0=[10., 10., 10., 10.])

        # Centered star with an injected companion at 20 pixel separation.
        create_fake(path=self.test_dir + 'binary',
                    ndit=[20, 20, 20, 20],
                    nframes=[20, 20, 20, 20],
                    exp_no=[1, 2, 3, 4],
                    npix=(101, 101),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=20.,
                    contrast=1.)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(path=self.test_dir, folders=['star', 'binary'])

    def test_read_data(self) -> None:
        """Read both synthetic data sets and pin their statistics."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='star',
                                   input_dir=self.test_dir + 'star',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('star')
        assert np.allclose(data[0, 10, 10], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00038538535294683216, rtol=limit, atol=0.)
        assert data.shape == (40, 51, 51)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='binary',
                                   input_dir=self.test_dir + 'binary',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('binary')
        assert np.allclose(data[0, 50, 50], 0.0986064357966972, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00019636787665654158, rtol=limit, atol=0.)
        assert data.shape == (80, 101, 101)

    def test_angle_interpolation(self) -> None:
        """Interpolate parallactic angles, then flip their sign for the
        later binary-extraction tests.
        """

        module = AngleInterpolationModule(name_in='angle',
                                          data_tag='binary')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        data = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        assert data[5] == 6.578947368421053
        assert np.allclose(np.mean(data), 50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

        # Negate PARANG in place; subsequent tests read the flipped angles.
        parang = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        self.pipeline.set_attribute('binary', 'PARANG', -1. * parang, static=False)

        data = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        assert data[5] == -6.578947368421053
        assert np.allclose(np.mean(data), -50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

    def test_extract_position_none(self) -> None:
        """Extraction with position=None searches the full frame for the
        brightest pixel.
        """

        module = StarExtractionModule(name_in='extract1',
                                      image_in_tag='star',
                                      image_out_tag='extract1',
                                      index_out_tag='index',
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        # No frames are rejected, so the index output dataset stays empty.
        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract1')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'The new dataset that is stored under the tag name ' \
                                             '\'index\' is empty.'

        data = self.pipeline.get_data('extract1')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract1', 'STAR_POSITION', static=False)
        # Star was injected at pixel (10, 10) in setup_class.
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_center_none(self) -> None:
        """position=(None, None, radius) centers the search on the image
        center and must find the same star.
        """

        module = StarExtractionModule(name_in='extract2',
                                      image_in_tag='star',
                                      image_out_tag='extract2',
                                      index_out_tag='index',
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=(None, None, 1.))

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract2')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'The new dataset that is stored under the tag name ' \
                                             '\'index\' is empty.'

        data = self.pipeline.get_data('extract2')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract2', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_position(self) -> None:
        """An explicit (x, y, radius) search position yields identical
        output to the automatic search.
        """

        module = StarExtractionModule(name_in='extract7',
                                      image_in_tag='star',
                                      image_out_tag='extract7',
                                      index_out_tag=None,
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=(10, 10, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract7')

        data = self.pipeline.get_data('extract7')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract7', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_too_large(self) -> None:
        """A crop larger than fits around the star falls back to the image
        center, warning once per frame.
        """

        module = StarExtractionModule(name_in='extract3',
                                      image_in_tag='star',
                                      image_out_tag='extract3',
                                      index_out_tag=None,
                                      image_size=0.8,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract3')

        # One fallback warning for each of the 40 frames.
        assert len(warning) == 40

        for i, item in enumerate(warning):
            assert item.message.args[0] == f'Chosen image size is too large to crop the image ' \
                                           f'around the brightest pixel (image index = {i}, ' \
                                           f'pixel [x, y] = [10, 10]). Using the center of ' \
                                           f'the image instead.'

        data = self.pipeline.get_data('extract3')
        assert np.allclose(data[0, 0, 0], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0004499242959139202, rtol=limit, atol=0.)
        assert data.shape == (40, 31, 31)

        attr = self.pipeline.get_attribute('extract3', 'STAR_POSITION', static=False)
        # Fallback position is the 31x31 image center, pixel (25, 25)
        # in the original frame.
        assert attr[10, 0] == attr[10, 1] == 25

    def test_star_extract_cpu(self) -> None:
        """Same too-large crop but with CPU=4: the multiprocessing path
        emits a single, index-free warning.
        """

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = StarExtractionModule(name_in='extract4',
                                      image_in_tag='star',
                                      image_out_tag='extract4',
                                      index_out_tag='index',
                                      image_size=0.8,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract4')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Chosen image size is too large to crop the image ' \
                                             'around the brightest pixel. Using the center of ' \
                                             'the image instead.'

    def test_extract_binary(self) -> None:
        """Extract the rotating companion around a fixed center position."""

        # Restore single-process mode for deterministic output ordering.
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = ExtractBinaryModule(pos_center=(50., 50.),
                                     pos_binary=(50., 70.),
                                     name_in='extract5',
                                     image_in_tag='binary',
                                     image_out_tag='extract5',
                                     image_size=0.5,
                                     search_size=0.2,
                                     filter_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract5')

        data = self.pipeline.get_data('extract5')
        assert np.allclose(data[0, 9, 9], 0.09774483733119443, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0027700881940171283, rtol=limit, atol=0.)
        assert data.shape == (80, 19, 19)

    def test_extract_binary_filter(self) -> None:
        """Same extraction with a smoothing filter applied during the
        position search; output differs only marginally.
        """

        module = ExtractBinaryModule(pos_center=(50., 50.),
                                     pos_binary=(50., 70.),
                                     name_in='extract6',
                                     image_in_tag='binary',
                                     image_out_tag='extract6',
                                     image_size=0.5,
                                     search_size=0.2,
                                     filter_size=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract6')

        data = self.pipeline.get_data('extract6')
        assert np.allclose(data[0, 9, 9], 0.09774483733119443, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.002770040591615301, rtol=limit, atol=0.)
        assert data.shape == (80, 19, 19)
class TestPypeline:
    """Tests for core Pypeline behavior: duplicate output ports and the
    apply_function_to_images machinery under different MEMORY settings.
    Methods run in declaration order and share database state.
    """

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + '/'

        # Fixed seed so all regression means below are deterministic.
        np.random.seed(1)

        image_3d = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        image_2d = np.random.normal(loc=0, scale=2e-4, size=(1, 10, 10))
        science = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        dark = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        # Write the datasets straight into the pipeline database.
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'w') as hdf_file:
            hdf_file.create_dataset('image_3d', data=image_3d)
            hdf_file.create_dataset('image_2d', data=image_2d)
            hdf_file.create_dataset('science', data=science)
            hdf_file.create_dataset('dark', data=dark)

        create_star_data(path=self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['images'])

    def test_output_port_name(self) -> None:
        """Adding an output port twice warns for both reading and
        processing modules.
        """

        read = FitsReadingModule(name_in='read',
                                 input_dir=self.test_dir + 'images',
                                 image_tag='images')

        read.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            read.add_output_port('test')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Tag \'test\' of ReadingModule \'read\' is already ' \
                                             'used.'

        process = BadPixelSigmaFilterModule(name_in='badpixel',
                                            image_in_tag='images',
                                            image_out_tag='im_out')

        process.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            process.add_output_port('test')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Tag \'test\' of ProcessingModule \'badpixel\' is ' \
                                             'already used.'

        # Re-point the module at a fresh database file and add a new port;
        # exercises the add-port path on an unconnected database.
        self.pipeline.m_data_storage.close_connection()

        process._m_data_base = self.test_dir + 'database.hdf5'
        process.add_output_port('new')

    def test_apply_function_to_images_3d(self) -> None:
        """Remove one image line from a 3D stack with MEMORY=1
        (frame-by-frame processing).
        """

        self.pipeline.set_attribute('config', 'MEMORY', 1, static=True)

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove1',
                                   image_in_tag='image_3d',
                                   image_out_tag='remove_3d')

        self.pipeline.add_module(remove)
        self.pipeline.run_module('remove1')

        data = self.pipeline.get_data('image_3d')
        assert np.allclose(np.mean(data), 1.0141852764605783e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        data = self.pipeline.get_data('remove_3d')
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        # One column removed: 10x10 frames become 10x9.
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_2d(self) -> None:
        """The same line removal works on a single-frame (2D) input."""

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove2',
                                   image_in_tag='image_2d',
                                   image_out_tag='remove_2d')

        self.pipeline.add_module(remove)
        self.pipeline.run_module('remove2')

        data = self.pipeline.get_data('image_2d')
        assert np.allclose(np.mean(data), 1.2869483197883442e-05, rtol=limit, atol=0.)
        assert data.shape == (1, 10, 10)

        data = self.pipeline.get_data('remove_2d')
        assert np.allclose(np.mean(data), 1.3957075246029751e-05, rtol=limit, atol=0.)
        assert data.shape == (1, 10, 9)

    def test_apply_function_to_images_same_port(self) -> None:
        """In-place processing (image_in_tag == image_out_tag) works for
        shape-preserving functions, but shape-changing functions require
        MEMORY=None.
        """

        dark = DarkCalibrationModule(name_in='dark1',
                                     image_in_tag='science',
                                     dark_in_tag='dark',
                                     image_out_tag='science')

        self.pipeline.add_module(dark)
        self.pipeline.run_module('dark1')

        data = self.pipeline.get_data('science')
        assert np.allclose(np.mean(data), -3.190113568690675e-06, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        # MEMORY=0 maps to unlimited memory (process everything at once).
        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        dark = DarkCalibrationModule(name_in='dark2',
                                     image_in_tag='science',
                                     dark_in_tag='dark',
                                     image_out_tag='science')

        self.pipeline.add_module(dark)
        self.pipeline.run_module('dark2')

        data = self.pipeline.get_data('science')
        assert np.allclose(np.mean(data), -1.026073475228737e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove3',
                                   image_in_tag='remove_3d',
                                   image_out_tag='remove_3d')

        self.pipeline.add_module(remove)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('remove3')

        assert str(error.value) == 'Input and output port have the same tag while the input ' \
                                   'function is changing the image shape. This is only ' \
                                   'possible with MEMORY=None.'

    def test_apply_function_to_images_memory_none(self) -> None:
        """Shape-changing function on a fresh output tag with unlimited
        memory (MEMORY is still 0 from the previous test).
        """

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove4',
                                   image_in_tag='image_3d',
                                   image_out_tag='remove_3d_none')

        self.pipeline.add_module(remove)
        self.pipeline.run_module('remove4')

        data = self.pipeline.get_data('remove_3d_none')
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_3d_args(self) -> None:
        """Scaling a 3D stack with extra arguments; pixscale=True also
        updates the PIXSCALE attribute (0.1 / 1.2 = 0.0833...).
        """

        self.pipeline.set_attribute('config', 'MEMORY', 1, static=True)
        self.pipeline.set_attribute('image_3d', 'PIXSCALE', 0.1, static=True)

        scale = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                  pixscale=True,
                                  name_in='scale1',
                                  image_in_tag='image_3d',
                                  image_out_tag='scale_3d')

        self.pipeline.add_module(scale)
        self.pipeline.run_module('scale1')

        data = self.pipeline.get_data('scale_3d')
        assert np.allclose(np.mean(data), 7.042953308754017e-05, rtol=limit, atol=0.)
        # 10x10 frames upscaled by 1.2 -> 12x12.
        assert data.shape == (4, 12, 12)

        attribute = self.pipeline.get_attribute('scale_3d', 'PIXSCALE', static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)

    def test_apply_function_to_images_2d_args(self) -> None:
        """Same scaling with extra arguments for a single-frame input."""

        self.pipeline.set_attribute('image_2d', 'PIXSCALE', 0.1, static=True)

        scale = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                  pixscale=True,
                                  name_in='scale2',
                                  image_in_tag='image_2d',
                                  image_out_tag='scale_2d')

        self.pipeline.add_module(scale)
        self.pipeline.run_module('scale2')

        data = self.pipeline.get_data('scale_2d')
        assert np.allclose(np.mean(data), 8.937141109641279e-05, rtol=limit, atol=0.)
        assert data.shape == (1, 12, 12)

        attribute = self.pipeline.get_attribute('scale_2d', 'PIXSCALE', static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)