def test_set_and_get_attribute(self) -> None:
    """Round-trip static and non-static dataset attributes through the pipeline."""

    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    # Store one static scalar and one non-static array attribute.
    pipeline.set_attribute('images', 'PIXSCALE', 0.1, static=True)
    pipeline.set_attribute('images', 'PARANG', np.arange(1., 11., 1.), static=False)

    # The static attribute reads back as the stored scalar.
    pixscale = pipeline.get_attribute('images', 'PIXSCALE', static=True)
    assert pixscale == pytest.approx(0.1, rel=self.limit, abs=0.)

    # The non-static attribute reads back as the stored array.
    parang = pipeline.get_attribute('images', 'PARANG', static=False)
    assert parang == pytest.approx(np.arange(1., 11., 1.), rel=self.limit, abs=0.)

    # Setting the same non-static attribute again overwrites the stored values.
    pipeline.set_attribute('images', 'PARANG', np.arange(10., 21., 1.), static=False)

    parang = pipeline.get_attribute('images', 'PARANG', static=False)
    assert parang == pytest.approx(np.arange(10., 21., 1.), rel=self.limit, abs=0.)
def test_set_and_get_attribute(self):
    """Set and read back static and non-static dataset attributes.

    NOTE(review): this appears to duplicate another test of the same name in
    this file (older np.allclose style vs. pytest.approx); only one definition
    survives at import time.  It also relies on a module-level ``limit``
    constant that is not visible in this chunk -- confirm it is defined in the
    module this test originally lives in.
    """

    pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    pipeline.set_attribute('images', 'PIXSCALE', 0.1, static=True)
    pipeline.set_attribute('images', 'PARANG', np.arange(1., 11., 1.), static=False)

    # Static scalar attribute round-trip.
    attribute = pipeline.get_attribute('images', 'PIXSCALE', static=True)
    assert np.allclose(attribute, 0.1, rtol=limit, atol=0.)

    # Non-static array attribute round-trip.
    attribute = pipeline.get_attribute('images', 'PARANG', static=False)
    assert np.allclose(attribute, np.arange(1., 11., 1.), rtol=limit, atol=0.)

    # Setting the attribute again overwrites the stored values.
    pipeline.set_attribute('images', 'PARANG', np.arange(10., 21., 1.), static=False)

    attribute = pipeline.get_attribute('images', 'PARANG', static=False)
    assert np.allclose(attribute, np.arange(10., 21., 1.), rtol=limit, atol=0.)
class TestFitsReading:
    """Tests for FitsReadingModule: importing FITS images and mapping their
    header keywords onto static and non-static dataset attributes."""

    def setup_class(self) -> None:
        # Create a synthetic data set and a configuration file next to this test file.
        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir + 'fits')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        # Remove everything created by setup_class and the individual tests.
        remove_test_data(self.test_dir, folders=['fits'], files=['filenames.dat'])

    def test_fits_reading(self) -> None:
        # Basic read from a directory, without overwriting an existing tag.
        module = FitsReadingModule(name_in='read1',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='input',
                                   overwrite=False,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('input')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_fits_reading_overwrite(self) -> None:
        # Re-reading the same files with overwrite=True reproduces the data set.
        module = FitsReadingModule(name_in='read2',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='input',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('input')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_static_not_found(self) -> None:
        # Point the DIT configuration at a keyword that is absent from the headers.
        self.pipeline.set_attribute('config', 'DIT', 'Test', static=True)

        module = FitsReadingModule(name_in='read3',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='input',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read3')

        # One warning per FITS file (two files in the data set).
        assert len(warning) == 2

        for item in warning:
            assert item.message.args[0] == 'Static attribute DIT (=Test) not found in the FITS ' \
                                           'header.'

        # Restore a valid keyword for the subsequent tests.
        self.pipeline.set_attribute('config', 'DIT', 'ESO DET DIT', static=True)

    def test_static_changing(self) -> None:
        # Give the two files different DIT values so the static attribute
        # changes partway through the data set.
        with fits.open(self.test_dir + 'fits/images_0.fits') as hdu:
            header = hdu[0].header
            header['HIERARCH ESO DET DIT'] = 0.1
            hdu.writeto(self.test_dir + 'fits/images_0.fits', overwrite=True)

        with fits.open(self.test_dir + 'fits/images_1.fits') as hdu:
            header = hdu[0].header
            header['HIERARCH ESO DET DIT'] = 0.2
            hdu.writeto(self.test_dir + 'fits/images_1.fits', overwrite=True)

        module = FitsReadingModule(name_in='read4',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='input',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read4')

        assert len(warning) == 1

        assert warning[0].message.args[0] == f'Static attribute ESO DET DIT has changed. ' \
                                             f'Possibly the current file {self.test_dir}fits/' \
                                             f'images_1.fits does not belong to the data set ' \
                                             f'\'input\'. Attribute value is updated.'

    def test_header_attribute(self) -> None:
        # Add PARANG (and a DIT value for the second file) directly to the
        # headers; the module run is expected to succeed without assertions.
        with fits.open(self.test_dir + 'fits/images_0.fits') as hdu:
            header = hdu[0].header
            header['PARANG'] = 1.0
            hdu.writeto(self.test_dir + 'fits/images_0.fits', overwrite=True)

        with fits.open(self.test_dir + 'fits/images_1.fits') as hdu:
            header = hdu[0].header
            header['PARANG'] = 2.0
            header['HIERARCH ESO DET DIT'] = 0.1
            hdu.writeto(self.test_dir + 'fits/images_1.fits', overwrite=True)

        module = FitsReadingModule(name_in='read5',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='input',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read5')

    def test_non_static_not_found(self) -> None:
        self.pipeline.set_attribute('config', 'DIT', 'None', static=True)

        # Strip the keywords that back the non-static attributes from both files.
        for i in range(2):
            with fits.open(f'{self.test_dir}/fits/images_{i}.fits') as hdu:
                header = hdu[0].header
                del header['HIERARCH ESO DET DIT']
                del header['HIERARCH ESO DET EXP NO']
                hdu.writeto(f'{self.test_dir}/fits/images_{i}.fits', overwrite=True)

        module = FitsReadingModule(name_in='read6',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='input',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read6')

        assert len(warning) == 2

        for item in warning:
            assert item.message.args[0] == 'Non-static attribute EXP_NO (=ESO DET EXP NO) not ' \
                                           'found in the FITS header.'

    def test_fits_read_files(self) -> None:
        # Select the input with an explicit list of filenames instead of input_dir.
        module = FitsReadingModule(name_in='read7',
                                   input_dir=None,
                                   image_tag='files',
                                   overwrite=False,
                                   check=True,
                                   filenames=[self.test_dir + 'fits/images_0.fits',
                                              self.test_dir + 'fits/images_1.fits'])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read7')

        # EXP NO was removed from the headers in test_non_static_not_found.
        assert len(warning) == 2

        for item in warning:
            assert item.message.args[0] == 'Non-static attribute EXP_NO (=ESO DET EXP NO) not ' \
                                           'found in the FITS header.'

        data = self.pipeline.get_data('files')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_fits_read_textfile(self) -> None:
        # Filenames may also be listed in a text file, one path per line.
        with open(self.test_dir + 'filenames.dat', 'w') as file_obj:
            file_obj.write(self.test_dir + 'fits/images_0.fits\n')
            file_obj.write(self.test_dir + 'fits/images_1.fits\n')

        module = FitsReadingModule(name_in='read8',
                                   input_dir=None,
                                   image_tag='files',
                                   overwrite=True,
                                   check=True,
                                   filenames=self.test_dir + 'filenames.dat')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read8')

        assert len(warning) == 2

        for item in warning:
            assert item.message.args[0] == 'Non-static attribute EXP_NO (=ESO DET EXP NO) not ' \
                                           'found in the FITS header.'

        data = self.pipeline.get_data('files')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_fits_read_files_exists(self) -> None:
        # A nonexistent file in the filenames list raises a ValueError.
        module = FitsReadingModule(name_in='read9',
                                   input_dir=None,
                                   image_tag='files',
                                   overwrite=True,
                                   check=True,
                                   filenames=[f'{self.test_dir}fits/images_0.fits',
                                              f'{self.test_dir}fits/images_2.fits'])

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('read9')

        assert str(error.value) == f'The file {self.test_dir}fits/images_2.fits does not exist. ' \
                                   f'Please check that the path is correct.'

    def test_fits_read_textfile_exists(self) -> None:
        # A nonexistent file listed in the text file raises the same ValueError.
        with open(self.test_dir + 'filenames.dat', 'w') as file_obj:
            file_obj.write(self.test_dir + 'fits/images_0.fits\n')
            file_obj.write(self.test_dir + 'fits/images_2.fits\n')

        module = FitsReadingModule(name_in='read10',
                                   input_dir=None,
                                   image_tag='files',
                                   overwrite=True,
                                   check=True,
                                   filenames=self.test_dir + 'filenames.dat')

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('read10')

        assert str(error.value) == f'The file {self.test_dir}fits/images_2.fits does not exist. ' \
                                   f'Please check that the path is correct.'
class TestNearInitModule(object):
    """Tests for NearReadingModule: importing VLT/VISIR NEAR chopping data.

    NOTE(review): the assertions use a module-level ``limit`` constant that is
    not visible in this chunk -- confirm it is defined in the module this class
    originally lives in.
    """

    def setup_class(self) -> None:
        # Create a synthetic NEAR data set and a configuration file, then
        # point the config attributes at the NEAR-specific header keywords.
        self.test_dir = os.path.dirname(__file__) + '/'
        self.fitsfile = self.test_dir + 'near/images_1.fits'

        create_near_data(path=self.test_dir + 'near')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

        self.pipeline.set_attribute('config', 'NFRAMES', 'ESO DET CHOP NCYCLES', static=True)
        self.pipeline.set_attribute('config', 'EXP_NO', 'ESO TPL EXPNO', static=True)
        self.pipeline.set_attribute('config', 'NDIT', 'None', static=True)
        self.pipeline.set_attribute('config', 'PARANG_START', 'None', static=True)
        self.pipeline.set_attribute('config', 'PARANG_END', 'None', static=True)
        self.pipeline.set_attribute('config', 'DITHER_X', 'None', static=True)
        self.pipeline.set_attribute('config', 'DITHER_Y', 'None', static=True)
        self.pipeline.set_attribute('config', 'PIXSCALE', 0.045, static=True)
        self.pipeline.set_attribute('config', 'MEMORY', 100, static=True)

        # Database output tags for the chop A and chop B positions.
        self.positions = ('chopa', 'chopb')

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['near'])

    def test_near_read(self) -> None:
        # Plain read: both chop positions receive the full image stack.
        module = NearReadingModule(name_in='read1a',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1a')

        for item in self.positions:
            data = self.pipeline.get_data(item)
            assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
            assert data.shape == (20, 10, 10)

    def test_near_subtract_crop_mean(self) -> None:
        # Subtracting the chop positions and mean-combining yields zero-mean,
        # cropped images.
        module = NearReadingModule(name_in='read1b',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1],
                                   subtract=True,
                                   crop=(None, None, 0.3),
                                   combine='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1b')

        data = self.pipeline.get_data(self.positions[0])
        assert np.allclose(np.mean(data), 0.0, rtol=limit, atol=0.)
        assert data.shape == (4, 7, 7)

        data = self.pipeline.get_data(self.positions[1])
        assert np.allclose(np.mean(data), 0.0, rtol=limit, atol=0.)
        assert data.shape == (4, 7, 7)

    def test_near_median(self) -> None:
        # Median-combining collapses each FITS file to one image per chop.
        module = NearReadingModule(name_in='read1c',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1],
                                   combine='median')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1c')

        data = self.pipeline.get_data(self.positions[0])
        assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        data = self.pipeline.get_data(self.positions[1])
        assert np.allclose(np.mean(data), 0.060582854, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

    def test_static_not_found(self) -> None:
        # An absent static keyword produces a warning per HDU read.
        self.pipeline.set_attribute('config', 'DIT', 'Test', static=True)

        module = NearReadingModule(name_in='read2',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read2')

        assert len(warning) == 8

        for item in warning:
            assert item.message.args[0] == 'Static attribute DIT (=Test) not found in the FITS ' \
                                           'header.'

        # Restore a valid DIT keyword for the remaining tests.
        self.pipeline.set_attribute('config', 'DIT', 'ESO DET SEQ1 DIT', static=True)

    def test_nonstatic_not_found(self) -> None:
        # Likewise for an absent non-static keyword.
        self.pipeline.set_attribute('config', 'NDIT', 'Test', static=True)

        module = NearReadingModule(name_in='read3',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read3')

        assert len(warning) == 8

        for item in warning:
            assert item.message.args[0] == 'Non-static attribute NDIT (=Test) not found in the ' \
                                           'FITS header.'

        self.pipeline.set_attribute('config', 'NDIT', 'None', static=True)

    def test_check_header(self) -> None:
        # Write header values that should each trigger a dedicated warning.
        with fits.open(self.fitsfile) as hdulist:
            hdulist[0].header['ESO DET CHOP ST'] = 'F'
            hdulist[0].header['ESO DET CHOP CYCSKIP'] = 1
            hdulist[0].header['ESO DET CHOP CYCSUM'] = 'T'
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read4',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read4')

        assert len(warning) == 3

        assert warning[0].message.args[0] == 'Dataset was obtained without chopping.'
        assert warning[1].message.args[0] == 'Chop cycles (1) have been skipped.'
        assert warning[2].message.args[0] == 'FITS file contains averaged images.'

        # Restore the original header values for the remaining tests.
        with fits.open(self.fitsfile) as hdulist:
            hdulist[0].header['ESO DET CHOP ST'] = 'T'
            hdulist[0].header['ESO DET CHOP CYCSKIP'] = 0
            hdulist[0].header['ESO DET CHOP CYCSUM'] = 'F'
            hdulist.writeto(self.fitsfile, overwrite=True)

    def test_frame_type_invalid(self) -> None:
        # An unexpected frame-type value aborts the run with a ValueError.
        with fits.open(self.fitsfile) as hdulist:
            hdulist[10].header['ESO DET FRAM TYPE'] = 'Test'
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read5',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('read5')

        assert str(error.value) == 'Frame type (Test) not a valid value. Expecting HCYCLE1 or ' \
                                   'HCYCLE2 as value for ESO DET FRAM TYPE.'

    def test_frame_type_missing(self) -> None:
        # A missing frame-type keyword also aborts with a ValueError.
        with fits.open(self.fitsfile) as hdulist:
            hdulist[10].header.remove('ESO DET FRAM TYPE')
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read6',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('read6')

        assert str(error.value) == 'Frame type not found in the FITS header. Image number: 9.'

    def test_same_cycle(self) -> None:
        # Two consecutive frames with the same chop position: the second is skipped.
        with fits.open(self.fitsfile) as hdulist:
            with pytest.warns(UserWarning) as warning:
                hdulist[10].header['ESO DET FRAM TYPE'] = 'HCYCLE1'

            # astropy warns because the long keyword requires a HIERARCH card.
            assert len(warning) == 1
            assert warning[0].message.args[0] == 'Keyword name \'ESO DET FRAM TYPE\' is greater ' \
                                                 'than 8 characters or contains characters not ' \
                                                 'allowed by the FITS standard; a HIERARCH card ' \
                                                 'will be created.'

            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read7',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read7')

        assert len(warning) == 2

        assert warning[0].message.args[0] == 'Previous and current chop position (HCYCLE1) are ' \
                                             'the same. Skipping the current image.'
        assert warning[1].message.args[0] == 'The number of images is not equal for chop A and ' \
                                             'chop B.'

    def test_odd_number_images(self) -> None:
        # Deleting one HDU leaves an odd number of images in the file.
        with fits.open(self.fitsfile) as hdulist:
            del hdulist[11]
            hdulist.writeto(self.fitsfile, overwrite=True)

        module = NearReadingModule(name_in='read8',
                                   input_dir=self.test_dir + 'near',
                                   chopa_out_tag=self.positions[0],
                                   chopb_out_tag=self.positions[1])

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('read8')

        assert len(warning) == 2

        assert warning[0].message.args[0] == f'FITS file contains odd number of images: ' \
                                             f'{self.fitsfile}'
        assert warning[1].message.args[0] == 'The number of chop cycles (5) is not equal to ' \
                                             'half the number of available HDU images (4).'
class TestFrameSelection:
    """Tests for the frame-selection, frame-statistics, and frame-similarity
    processing modules.  The tests run in order and share the pipeline state
    (tags 'read', 'last', 'start', ...) created by earlier tests."""

    def setup_class(self) -> None:
        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['images'])

    def test_read_data(self) -> None:
        # Import the synthetic data set; later tests consume the 'read' tag.
        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir + 'images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        attr = self.pipeline.get_attribute('read', 'NDIT', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('read', 'NFRAMES', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        # Make NDIT smaller than NFRAMES so RemoveLastFrameModule has frames to drop.
        self.pipeline.set_attribute('read', 'NDIT', [4, 4], static=False)

    def test_remove_last_frame(self) -> None:
        # Drops the NFRAMES-NDIT trailing frames of each cube: 10 -> 8 images.
        module = RemoveLastFrameModule(name_in='last',
                                       image_in_tag='read',
                                       image_out_tag='last')

        self.pipeline.add_module(module)
        self.pipeline.run_module('last')

        data = self.pipeline.get_data('last')
        assert np.sum(data) == pytest.approx(84.68885503527224, rel=self.limit, abs=0.)
        assert data.shape == (8, 11, 11)

        # Attach attributes that the subsequent selection tests will check.
        self.pipeline.set_attribute('last', 'PARANG', np.arange(8.), static=False)
        self.pipeline.set_attribute('last', 'STAR_POSITION', np.full((8, 2), 5.), static=False)

        attr = self.pipeline.get_attribute('last', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(28., rel=self.limit, abs=0.)
        assert attr.shape == (8, )

        attr = self.pipeline.get_attribute('last', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(80., rel=self.limit, abs=0.)
        assert attr.shape == (8, 2)

    def test_remove_start_frame(self) -> None:
        # Drops the first frame of each cube: 8 -> 6 images.
        module = RemoveStartFramesModule(frames=1,
                                         name_in='start',
                                         image_in_tag='last',
                                         image_out_tag='start')

        self.pipeline.add_module(module)
        self.pipeline.run_module('start')

        data = self.pipeline.get_data('start')
        assert np.sum(data) == pytest.approx(64.44307047549808, rel=self.limit, abs=0.)
        assert data.shape == (6, 11, 11)

        attr = self.pipeline.get_attribute('start', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(24., rel=self.limit, abs=0.)
        assert attr.shape == (6, )

        attr = self.pipeline.get_attribute('start', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(60., rel=self.limit, abs=0.)
        assert attr.shape == (6, 2)

    def test_remove_frames(self) -> None:
        # Explicit removal by index; attributes follow the selected/removed split.
        module = RemoveFramesModule(name_in='remove',
                                    image_in_tag='start',
                                    selected_out_tag='selected',
                                    removed_out_tag='removed',
                                    frames=[2, 5])

        self.pipeline.add_module(module)
        self.pipeline.run_module('remove')

        data = self.pipeline.get_data('selected')
        assert np.sum(data) == pytest.approx(43.68337741822863, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('removed')
        assert np.sum(data) == pytest.approx(20.759693057269445, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        attr = self.pipeline.get_attribute('selected', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(14., rel=self.limit, abs=0.)
        assert attr.shape == (4, )

        attr = self.pipeline.get_attribute('selected', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(40., rel=self.limit, abs=0.)
        assert attr.shape == (4, 2)

        attr = self.pipeline.get_attribute('removed', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(10., rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('removed', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(20., rel=self.limit, abs=0.)
        assert attr.shape == (2, 2)

    def test_frame_selection(self) -> None:
        # Median-based selection with a circular aperture.
        module = FrameSelectionModule(name_in='select1',
                                      image_in_tag='start',
                                      selected_out_tag='selected1',
                                      removed_out_tag='removed1',
                                      index_out_tag='index1',
                                      method='median',
                                      threshold=2.,
                                      fwhm=0.1,
                                      aperture=('circular', 0.1),
                                      position=(None, None, 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('select1')

        data = self.pipeline.get_data('selected1')
        assert np.sum(data) == pytest.approx(54.58514780071149, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

        data = self.pipeline.get_data('removed1')
        assert np.sum(data) == pytest.approx(9.857922674786586, rel=self.limit, abs=0.)
        assert data.shape == (1, 11, 11)

        data = self.pipeline.get_data('index1')
        assert np.sum(data) == pytest.approx(5, rel=self.limit, abs=0.)
        assert data.shape == (1, )

        attr = self.pipeline.get_attribute('selected1', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(17., rel=self.limit, abs=0.)
        assert attr.shape == (5, )

        attr = self.pipeline.get_attribute('selected1', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(50, rel=self.limit, abs=0.)
        assert attr.shape == (5, 2)

        attr = self.pipeline.get_attribute('removed1', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(7., rel=self.limit, abs=0.)
        assert attr.shape == (1, )

        attr = self.pipeline.get_attribute('removed1', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.)
        assert attr.shape == (1, 2)

        # Max-based selection with an annulus aperture.
        module = FrameSelectionModule(name_in='select2',
                                      image_in_tag='start',
                                      selected_out_tag='selected2',
                                      removed_out_tag='removed2',
                                      index_out_tag='index2',
                                      method='max',
                                      threshold=1.,
                                      fwhm=0.1,
                                      aperture=('annulus', 0.05, 0.1),
                                      position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('select2')

        data = self.pipeline.get_data('selected2')
        assert np.sum(data) == pytest.approx(21.42652724866543, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        data = self.pipeline.get_data('removed2')
        assert np.sum(data) == pytest.approx(43.016543226832646, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('index2')
        assert np.sum(data) == pytest.approx(10, rel=self.limit, abs=0.)
        assert data.shape == (4, )

        attr = self.pipeline.get_attribute('selected2', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(8., rel=self.limit, abs=0.)
        assert attr.shape == (2, )

        attr = self.pipeline.get_attribute('selected2', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(20, rel=self.limit, abs=0.)
        assert attr.shape == (2, 2)

        attr = self.pipeline.get_attribute('removed2', 'PARANG', static=False)
        assert np.sum(attr) == pytest.approx(16., rel=self.limit, abs=0.)
        assert attr.shape == (4, )

        attr = self.pipeline.get_attribute('removed2', 'STAR_POSITION', static=False)
        assert np.sum(attr) == pytest.approx(40, rel=self.limit, abs=0.)
        assert attr.shape == (4, 2)

        # Range-based selection on the photometry values.
        module = FrameSelectionModule(name_in='select3',
                                      image_in_tag='start',
                                      selected_out_tag='selected3',
                                      removed_out_tag='removed3',
                                      index_out_tag='index3',
                                      method='range',
                                      threshold=(10., 10.7),
                                      fwhm=0.1,
                                      aperture=('circular', 0.1),
                                      position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('select3')

        data = self.pipeline.get_data('selected3')
        assert np.sum(data) == pytest.approx(22.2568501695632, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        data = self.pipeline.get_data('removed3')
        assert np.sum(data) == pytest.approx(42.18622030593487, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('index3')
        assert np.sum(data) == pytest.approx(12, rel=self.limit, abs=0.)
        assert data.shape == (4, )

    def test_image_statistics_full(self) -> None:
        # Statistics over the full frames.
        module = ImageStatisticsModule(name_in='stat1',
                                       image_in_tag='read',
                                       stat_out_tag='stat1',
                                       position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat1')

        data = self.pipeline.get_data('stat1')
        assert np.sum(data) == pytest.approx(115.68591492205017, rel=self.limit, abs=0.)
        assert data.shape == (10, 6)

    def test_image_statistics_posiiton(self) -> None:
        # Statistics restricted to an aperture at a fixed position.
        # NOTE(review): method name misspells 'position'; left unchanged here
        # since renaming a test can affect test selection by name.
        module = ImageStatisticsModule(name_in='stat2',
                                       image_in_tag='read',
                                       stat_out_tag='stat2',
                                       position=(5, 5, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat2')

        data = self.pipeline.get_data('stat2')
        assert np.sum(data) == pytest.approx(118.7138708968444, rel=self.limit, abs=0.)
        assert data.shape == (10, 6)

    def test_frame_similarity_mse(self) -> None:
        # Similarity values are stored as a non-static attribute on the input tag.
        module = FrameSimilarityModule(name_in='simi1',
                                       image_tag='read',
                                       method='MSE',
                                       mask_radius=(0., 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi1')

        attr = self.pipeline.get_attribute('read', 'MSE', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(0.11739141370277852, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_frame_similarity_pcc(self) -> None:
        module = FrameSimilarityModule(name_in='simi2',
                                       image_tag='read',
                                       method='PCC',
                                       mask_radius=(0., 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi2')

        attr = self.pipeline.get_attribute('read', 'PCC', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(9.134820985662829, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_frame_similarity_ssim(self) -> None:
        module = FrameSimilarityModule(name_in='simi3',
                                       image_tag='read',
                                       method='SSIM',
                                       mask_radius=(0., 0.2),
                                       temporal_median='constant')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi3')

        attr = self.pipeline.get_attribute('read', 'SSIM', static=False)
        assert np.min(attr) > 0.
        assert np.sum(attr) == pytest.approx(9.096830542868524, rel=self.limit, abs=0.)
        assert attr.shape == (10, )

    def test_select_by_attribute(self) -> None:
        # Select the 6 frames with the highest SSIM values.
        # NOTE(review): INDEX is set to 44 values while the data set holds 10
        # frames, and the selected tag keeps all 44 -- presumably intentional
        # (INDEX is not re-split by the module); confirm against the module docs.
        self.pipeline.set_attribute('read', 'INDEX', np.arange(44), static=False)

        module = SelectByAttributeModule(name_in='frame_removal_1',
                                         image_in_tag='read',
                                         attribute_tag='SSIM',
                                         number_frames=6,
                                         order='descending',
                                         selected_out_tag='select_sim',
                                         removed_out_tag='remove_sim')

        self.pipeline.add_module(module)
        self.pipeline.run_module('frame_removal_1')

        attr = self.pipeline.get_attribute('select_sim', 'INDEX', static=False)
        assert np.sum(attr) == pytest.approx(946, rel=self.limit, abs=0.)
        assert attr.shape == (44, )

        attr = self.pipeline.get_attribute('select_sim', 'SSIM', static=False)
        assert np.sum(attr) == pytest.approx(5.556889532446573, rel=self.limit, abs=0.)
        assert attr.shape == (6, )

        attr = self.pipeline.get_attribute('remove_sim', 'SSIM', static=False)
        assert np.sum(attr) == pytest.approx(3.539941010421951, rel=self.limit, abs=0.)
        assert attr.shape == (4, )

    def test_residual_selection(self) -> None:
        # Keep the best 80% of frames based on the residual noise in an annulus.
        module = ResidualSelectionModule(name_in='residual_select',
                                         image_in_tag='start',
                                         selected_out_tag='res_selected',
                                         removed_out_tag='res_removed',
                                         percentage=80.,
                                         annulus_radii=(0.1, 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('residual_select')

        data = self.pipeline.get_data('res_selected')
        assert np.sum(data) == pytest.approx(41.77295229983322, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('res_removed')
        assert np.sum(data) == pytest.approx(22.670118175664847, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)
class TestFrameSelection:
    """Integration tests for the frame-selection pipeline modules."""

    def setup_class(self):
        """Create 4 test cubes of 11 frames each plus a configuration file."""
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'images', ndit=10, nframes=11)
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Delete the test data that was created for this class."""
        remove_test_data(self.test_dir, folders=['images'])

    def test_read_data(self):
        """Read the FITS images into the database."""
        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir + 'images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0001002167910262529, rtol=limit, atol=0.)
        assert data.shape == (44, 100, 100)

    def test_remove_last_frame(self):
        """Drop the last frame of each cube and attach PARANG/STAR_POSITION."""
        module = RemoveLastFrameModule(name_in='last',
                                       image_in_tag='read',
                                       image_out_tag='last')

        self.pipeline.add_module(module)
        self.pipeline.run_module('last')

        data = self.pipeline.get_data('last')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010020258903646778, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        self.pipeline.set_attribute('last', 'PARANG', np.arange(0., 40., 1.), static=False)

        star = np.zeros((40, 2))
        star[:, 0] = np.arange(40., 80., 1.)
        star[:, 1] = np.arange(40., 80., 1.)

        self.pipeline.set_attribute('last', 'STAR_POSITION', star, static=False)

        attribute = self.pipeline.get_attribute('last', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 19.5, rtol=limit, atol=0.)
        assert attribute.shape == (40, )

        attribute = self.pipeline.get_attribute('last', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 59.5, rtol=limit, atol=0.)
        assert attribute.shape == (40, 2)

    def test_remove_start_frame(self):
        """Drop the first 2 frames of each cube."""
        module = RemoveStartFramesModule(frames=2,
                                         name_in='start',
                                         image_in_tag='last',
                                         image_out_tag='start')

        self.pipeline.add_module(module)
        self.pipeline.run_module('start')

        data = self.pipeline.get_data('start')
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010011298467340513, rtol=limit, atol=0.)
        assert data.shape == (32, 100, 100)

        attribute = self.pipeline.get_attribute('start', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 20.5, rtol=limit, atol=0.)
        assert attribute.shape == (32, )

        attribute = self.pipeline.get_attribute('start', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 60.5, rtol=limit, atol=0.)
        assert attribute.shape == (32, 2)

    def test_remove_frames(self):
        """Remove frames by index and check selected/removed output."""
        module = RemoveFramesModule(name_in='remove',
                                    image_in_tag='start',
                                    selected_out_tag='selected',
                                    removed_out_tag='removed',
                                    frames=[5, 8, 13, 25, 31])

        self.pipeline.add_module(module)
        self.pipeline.run_module('remove')

        data = self.pipeline.get_data('selected')
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.984682304434105e-05, rtol=limit, atol=0.)
        assert data.shape == (27, 100, 100)

        data = self.pipeline.get_data('removed')
        assert np.allclose(data[0, 50, 50], 0.09818692015286978, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010155025747035087, rtol=limit, atol=0.)
        assert data.shape == (5, 100, 100)

        attribute = self.pipeline.get_attribute('selected', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 20.296296296296298, rtol=limit, atol=0.)
        assert attribute.shape == (27, )

        attribute = self.pipeline.get_attribute('selected', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 60.2962962962963, rtol=limit, atol=0.)
        assert attribute.shape == (27, 2)

        attribute = self.pipeline.get_attribute('removed', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 21.6, rtol=limit, atol=0.)
        assert attribute.shape == (5, )

        attribute = self.pipeline.get_attribute('removed', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 61.6, rtol=limit, atol=0.)
        assert attribute.shape == (5, 2)

    def test_frame_selection(self):
        """Select frames with the 'median' and 'max' criteria."""
        module = FrameSelectionModule(name_in='select1',
                                      image_in_tag='start',
                                      selected_out_tag='selected1',
                                      removed_out_tag='removed1',
                                      index_out_tag='index1',
                                      method='median',
                                      threshold=1.,
                                      fwhm=0.1,
                                      aperture=('circular', 0.2),
                                      position=(None, None, 0.5))

        self.pipeline.add_module(module)
        self.pipeline.run_module('select1')

        data = self.pipeline.get_data('selected1')
        assert np.allclose(data[0, 50, 50], 0.09791350617182591, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.980792188317311e-05, rtol=limit, atol=0.)
        assert data.shape == (22, 100, 100)

        data = self.pipeline.get_data('removed1')
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010078412281191547, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

        data = self.pipeline.get_data('index1')
        assert data[-1] == 28
        assert np.sum(data) == 115
        assert data.shape == (10, )

        attribute = self.pipeline.get_attribute('selected1', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 22.681818181818183, rtol=limit, atol=0.)
        assert attribute.shape == (22, )

        attribute = self.pipeline.get_attribute('selected1', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (22, 2)

        attribute = self.pipeline.get_attribute('removed1', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 15.7, rtol=limit, atol=0.)
        assert attribute.shape == (10, )

        attribute = self.pipeline.get_attribute('removed1', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (10, 2)

        module = FrameSelectionModule(name_in='select2',
                                      image_in_tag='start',
                                      selected_out_tag='selected2',
                                      removed_out_tag='removed2',
                                      index_out_tag='index2',
                                      method='max',
                                      threshold=3.,
                                      fwhm=0.1,
                                      aperture=('annulus', 0.1, 0.2),
                                      position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('select2')

        data = self.pipeline.get_data('selected2')
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010037996502199598, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('removed2')
        assert np.allclose(data[0, 50, 50], 0.097912284606689, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.966801742575358e-05, rtol=limit, atol=0.)
        assert data.shape == (12, 100, 100)

        data = self.pipeline.get_data('index2')
        assert data[-1] == 30
        assert np.sum(data) == 230
        assert data.shape == (12, )

        attribute = self.pipeline.get_attribute('selected2', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 17.8, rtol=limit, atol=0.)
        assert attribute.shape == (20, )

        attribute = self.pipeline.get_attribute('selected2', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (20, 2)

        attribute = self.pipeline.get_attribute('removed2', 'PARANG', static=False)
        assert np.allclose(np.mean(attribute), 25.0, rtol=limit, atol=0.)
        assert attribute.shape == (12, )

        attribute = self.pipeline.get_attribute('removed2', 'STAR_POSITION', static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (12, 2)

    def test_image_statistics_full(self):
        """Pixel statistics of the full frames."""
        module = ImageStatisticsModule(name_in='stat1',
                                       image_in_tag='read',
                                       stat_out_tag='stat1',
                                       position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat1')

        data = self.pipeline.get_data('stat1')
        assert np.allclose(data[0, 0], -0.0007312880198509591, rtol=limit, atol=0.)
        assert np.allclose(np.sum(data), 48.479917666979716, rtol=limit, atol=0.)
        assert data.shape == (44, 6)

    def test_image_statistics_position(self):
        """Pixel statistics inside an aperture at a fixed position.

        Renamed from the misspelled ``test_image_statistics_posiiton``.
        """
        module = ImageStatisticsModule(name_in='stat2',
                                       image_in_tag='read',
                                       stat_out_tag='stat2',
                                       position=(70, 20, 0.5))

        self.pipeline.add_module(module)
        self.pipeline.run_module('stat2')

        data = self.pipeline.get_data('stat2')
        assert np.allclose(data[0, 0], -0.0006306714900382097, rtol=limit, atol=0.)
        assert np.allclose(np.sum(data), -0.05448258074038106, rtol=limit, atol=0.)
        assert data.shape == (44, 6)

    def test_frame_similarity_mse(self):
        """Frame similarity with the mean squared error (MSE) metric."""
        module = FrameSimilarityModule(name_in='simi1',
                                       image_tag='read',
                                       method='MSE',
                                       mask_radius=(0., 1.))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi1')

        similarity = self.pipeline.get_attribute('read', 'MSE', static=False)

        assert len(similarity) == self.pipeline.get_shape('read')[0]
        assert np.min(similarity) > 0
        assert similarity[4] != similarity[8]
        assert np.allclose(np.sum(similarity), 1.7938335695664495e-06, rtol=limit, atol=0.)
        assert np.allclose(similarity[0], 4.103008589430469e-08, rtol=limit, atol=0.)

    def test_frame_similarity_pcc(self):
        """Frame similarity with the Pearson correlation coefficient (PCC)."""
        module = FrameSimilarityModule(name_in='simi2',
                                       image_tag='read',
                                       method='PCC',
                                       mask_radius=(0., 1.))

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi2')

        similarity = self.pipeline.get_attribute('read', 'PCC', static=False)

        assert len(similarity) == self.pipeline.get_shape('read')[0]
        assert np.min(similarity) > 0
        assert np.max(similarity) < 1
        assert similarity[4] != similarity[8]
        assert np.allclose(np.sum(similarity), 43.974652830856314, rtol=limit, atol=0.)
        assert np.allclose(similarity[0], 0.9994193494590345, rtol=limit, atol=0.)

    def test_frame_similarity_ssim(self):
        """Frame similarity with the structural similarity (SSIM) metric."""
        module = FrameSimilarityModule(name_in='simi3',
                                       image_tag='read',
                                       method='SSIM',
                                       mask_radius=(0., 1.),
                                       temporal_median='constant')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simi3')

        similarity = self.pipeline.get_attribute('read', 'SSIM', static=False)

        assert len(similarity) == self.pipeline.get_shape('read')[0]
        assert np.min(similarity) > 0
        assert np.max(similarity) < 1
        assert similarity[4] != similarity[8]
        assert np.allclose(np.sum(similarity), 43.999059977871184, rtol=limit, atol=0.)
        assert np.allclose(similarity[0], 0.9999793908738922, rtol=limit, atol=0.)

    def test_select_by_attribute(self):
        """Select the 6 frames with the largest SSIM values."""
        total_length = self.pipeline.get_shape('read')[0]
        self.pipeline.set_attribute('read', 'INDEX', range(total_length), static=False)
        attribute_tag = 'SSIM'

        module = SelectByAttributeModule(name_in='frame_removal_1',
                                         image_in_tag='read',
                                         attribute_tag=attribute_tag,
                                         number_frames=6,
                                         order='descending',
                                         selected_out_tag='select_sim',
                                         removed_out_tag='remove_sim')

        self.pipeline.add_module(module)
        self.pipeline.run_module('frame_removal_1')

        index = self.pipeline.get_attribute('select_sim', 'INDEX', static=False)
        similarity = self.pipeline.get_attribute('select_sim', attribute_tag, static=False)
        sim_removed = self.pipeline.get_attribute('remove_sim', attribute_tag, static=False)

        # check attribute length
        assert self.pipeline.get_shape('select_sim')[0] == 6
        assert len(similarity) == 6
        assert len(similarity) == len(index)
        assert len(similarity) + len(sim_removed) == total_length

        # check sorted
        assert all(similarity[i] >= similarity[i + 1] for i in range(len(similarity) - 1))

        # check that the selected attributes are in the correct tags
        assert np.min(similarity) > np.max(sim_removed)
class TestStackSubset:
    """Integration tests for stacking, subset, and tag-combination modules."""

    def setup_class(self):
        """Create two identical test data sets and a configuration file."""
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'data')
        create_star_data(path=self.test_dir + 'extra')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Delete the test data that was created for this class."""
        remove_test_data(self.test_dir, folders=['data', 'extra'])

    def test_read_data(self):
        """Read both data sets and check that they are identical."""
        read = FitsReadingModule(name_in='read1',
                                 image_tag='images',
                                 input_dir=self.test_dir + 'data',
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        read = FitsReadingModule(name_in='read2',
                                 image_tag='extra',
                                 input_dir=self.test_dir + 'extra',
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module('read2')

        extra = self.pipeline.get_data('extra')
        # 'data' and 'extra' were generated identically
        assert np.allclose(data, extra, rtol=limit, atol=0.)

    def test_stack_and_subset(self):
        """Stack pairs of images and draw a random subset of 10."""
        self.pipeline.set_attribute('images', 'PARANG', np.arange(1., 41., 1.), static=False)

        stack = StackAndSubsetModule(name_in='stack',
                                     image_in_tag='images',
                                     image_out_tag='stack',
                                     random=10,
                                     stacking=2)

        self.pipeline.add_module(stack)
        self.pipeline.run_module('stack')

        data = self.pipeline.get_data('stack')
        assert np.allclose(data[0, 50, 50], 0.09816320034649725, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.983545774937238e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

        data = self.pipeline.get_data('header_stack/INDEX')
        index = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        assert np.allclose(data, index, rtol=limit, atol=0.)
        assert data.shape == (10, )

        data = self.pipeline.get_data('header_stack/PARANG')
        # mean of each stacked pair of angles, for the randomly drawn subset
        parang = [1.5, 15.5, 19.5, 23.5, 25.5, 29.5, 31.5, 35.5, 37.5, 39.5]
        assert np.allclose(data, parang, rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_mean_cube(self):
        """Deprecated MeanCubeModule: check warning and mean-combined output."""
        with pytest.warns(DeprecationWarning) as warning:
            mean = MeanCubeModule(name_in='mean',
                                  image_in_tag='images',
                                  image_out_tag='mean')

        assert len(warning) == 1
        # message text must match the module's warning verbatim
        assert warning[0].message.args[0] == 'The MeanCubeModule will be be deprecated in a ' \
                                             'future release. Please use the StackCubesModule ' \
                                             'instead.'

        self.pipeline.add_module(mean)
        self.pipeline.run_module('mean')

        data = self.pipeline.get_data('mean')
        assert np.allclose(data[0, 50, 50], 0.09805840100024205, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738069, rtol=limit, atol=0.)
        assert data.shape == (4, 100, 100)

        attribute = self.pipeline.get_attribute('mean', 'INDEX', static=False)
        assert np.allclose(np.mean(attribute), 1.5, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        attribute = self.pipeline.get_attribute('mean', 'NFRAMES', static=False)
        assert np.allclose(np.mean(attribute), 1, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

    def test_stack_cube(self):
        """StackCubesModule should reproduce the MeanCubeModule result."""
        module = StackCubesModule(name_in='stackcube',
                                  image_in_tag='images',
                                  image_out_tag='mean',
                                  combine='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('stackcube')

        data = self.pipeline.get_data('mean')
        assert np.allclose(data[0, 50, 50], 0.09805840100024205, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738069, rtol=limit, atol=0.)
        assert data.shape == (4, 100, 100)

        attribute = self.pipeline.get_attribute('mean', 'INDEX', static=False)
        assert np.allclose(np.mean(attribute), 1.5, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        attribute = self.pipeline.get_attribute('mean', 'NFRAMES', static=False)
        assert np.allclose(np.mean(attribute), 1, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

    def test_derotate_and_stack(self):
        """Derotate + mean-stack, then median-stack without derotation."""
        derotate = DerotateAndStackModule(name_in='derotate1',
                                          image_in_tag='images',
                                          image_out_tag='derotate1',
                                          derotate=True,
                                          stack='mean',
                                          extra_rot=10.)

        self.pipeline.add_module(derotate)
        self.pipeline.run_module('derotate1')

        data = self.pipeline.get_data('derotate1')
        assert np.allclose(data[0, 50, 50], 0.09689679769268554, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010021671152246617, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

        derotate = DerotateAndStackModule(name_in='derotate2',
                                          image_in_tag='images',
                                          image_out_tag='derotate2',
                                          derotate=False,
                                          stack='median',
                                          extra_rot=0.)

        self.pipeline.add_module(derotate)
        self.pipeline.run_module('derotate2')

        data = self.pipeline.get_data('derotate2')
        assert np.allclose(data[0, 50, 50], 0.09809001768003645, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010033064394962, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_combine_tags(self):
        """Combine two tags, with and without attribute checking."""
        combine = CombineTagsModule(image_in_tags=('images', 'extra'),
                                    check_attr=True,
                                    index_init=False,
                                    name_in='combine1',
                                    image_out_tag='combine1')

        self.pipeline.add_module(combine)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('combine1')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'The non-static keyword FILES is already used but ' \
                                             'with different values. It is advisable to only ' \
                                             'combine tags that descend from the same data set.'

        data = self.pipeline.get_data('combine1')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738068, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('header_combine1/INDEX')
        # index_init=False keeps the original per-tag indices
        assert data[40] == 0
        assert data.shape == (80, )

        combine = CombineTagsModule(image_in_tags=('images', 'extra'),
                                    check_attr=False,
                                    index_init=True,
                                    name_in='combine2',
                                    image_out_tag='combine2')

        self.pipeline.add_module(combine)
        self.pipeline.run_module('combine2')

        data = self.pipeline.get_data('combine1')
        extra = self.pipeline.get_data('combine2')
        assert np.allclose(data, extra, rtol=limit, atol=0.)

        data = self.pipeline.get_data('header_combine2/INDEX')
        # index_init=True re-indexes the combined frames consecutively
        assert data[40] == 40
        assert data.shape == (80, )
class TestFitsReadingModule(object):
    """Integration tests for FitsReadingModule and its FITS-header handling."""

    def setup_class(self):
        """Create a test data set and a configuration file."""
        self.test_dir = os.path.dirname(__file__) + "/"

        create_star_data(path=self.test_dir + "fits")
        create_config(self.test_dir + "PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Delete the test data that was created for this class."""
        remove_test_data(self.test_dir, folders=["fits"])

    def test_fits_reading(self):
        """Read the FITS files without overwriting."""
        read = FitsReadingModule(name_in="read1",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=False,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read1")

        data = self.pipeline.get_data("input")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_fits_reading_overwrite(self):
        """Read the same FITS files again with overwrite=True."""
        read = FitsReadingModule(name_in="read2",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read2")

        data = self.pipeline.get_data("input")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_static_not_found(self):
        """Warn when a configured static header keyword is missing."""
        self.pipeline.set_attribute("config", "DIT", "ESO DET DIT", static=True)

        read = FitsReadingModule(name_in="read3",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read3")

        # one warning per FITS file
        assert len(warning) == 4

        for item in warning:
            assert item.message.args[0] == "Static attribute DIT (=ESO DET DIT) not found in " \
                                           "the FITS header."

    def test_static_changing(self):
        """Warn when a static header value differs between files."""
        with fits.open(self.test_dir + "fits/image01.fits") as hdu:
            header = hdu[0].header
            header['HIERARCH ESO DET DIT'] = 0.1
            hdu.writeto(self.test_dir + "fits/image01.fits", overwrite=True)

        with fits.open(self.test_dir + "fits/image02.fits") as hdu:
            header = hdu[0].header
            header['HIERARCH ESO DET DIT'] = 0.1
            hdu.writeto(self.test_dir + "fits/image02.fits", overwrite=True)

        with fits.open(self.test_dir + "fits/image03.fits") as hdu:
            header = hdu[0].header
            header['HIERARCH ESO DET DIT'] = 0.2
            hdu.writeto(self.test_dir + "fits/image03.fits", overwrite=True)

        with fits.open(self.test_dir + "fits/image04.fits") as hdu:
            header = hdu[0].header
            header['HIERARCH ESO DET DIT'] = 0.2
            hdu.writeto(self.test_dir + "fits/image04.fits", overwrite=True)

        read = FitsReadingModule(name_in="read4",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read4")

        # files 3 and 4 carry a different DIT value than files 1 and 2
        assert len(warning) == 2

        assert warning[0].message.args[0] == "Static attribute ESO DET DIT has changed. " \
                                             "Possibly the current file image03.fits does " \
                                             "not belong to the data set 'input'. Attribute " \
                                             "value is updated."

        assert warning[1].message.args[0] == "Static attribute ESO DET DIT has changed. " \
                                             "Possibly the current file image04.fits does " \
                                             "not belong to the data set 'input'. Attribute " \
                                             "value is updated."

    def test_header_attribute(self):
        """Read files that carry PARANG (and partly DIT) header values."""
        with fits.open(self.test_dir + "fits/image01.fits") as hdu:
            header = hdu[0].header
            header['PARANG'] = 1.0
            hdu.writeto(self.test_dir + "fits/image01.fits", overwrite=True)

        with fits.open(self.test_dir + "fits/image02.fits") as hdu:
            header = hdu[0].header
            header['PARANG'] = 2.0
            hdu.writeto(self.test_dir + "fits/image02.fits", overwrite=True)

        with fits.open(self.test_dir + "fits/image03.fits") as hdu:
            header = hdu[0].header
            header['PARANG'] = 3.0
            header['HIERARCH ESO DET DIT'] = 0.1
            hdu.writeto(self.test_dir + "fits/image03.fits", overwrite=True)

        with fits.open(self.test_dir + "fits/image04.fits") as hdu:
            header = hdu[0].header
            header['PARANG'] = 4.0
            header['HIERARCH ESO DET DIT'] = 0.1
            hdu.writeto(self.test_dir + "fits/image04.fits", overwrite=True)

        read = FitsReadingModule(name_in="read5",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read5")

    def test_non_static_not_found(self):
        """Warn when a non-static header keyword is missing."""
        self.pipeline.set_attribute("config", "DIT", "None", static=True)

        for i in range(1, 5):
            with fits.open(self.test_dir + "fits/image0" + str(i) + ".fits") as hdu:
                header = hdu[0].header
                del header['HIERARCH ESO DET DIT']
                del header['HIERARCH ESO DET EXP NO']
                hdu.writeto(self.test_dir + "fits/image0" + str(i) + ".fits", overwrite=True)

        read = FitsReadingModule(name_in="read6",
                                 input_dir=self.test_dir + "fits",
                                 image_tag="input",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read6")

        assert len(warning) == 4

        for item in warning:
            assert item.message.args[0] == "Non-static attribute EXP_NO (=ESO DET EXP NO) not " \
                                           "found in the FITS header."

    def test_fits_read_files(self):
        """Read an explicit list of FITS files instead of a directory."""
        folder = os.path.dirname(os.path.abspath(__file__))

        read = FitsReadingModule(name_in="read7",
                                 input_dir=None,
                                 image_tag="files",
                                 overwrite=False,
                                 check=True,
                                 filenames=[folder + "/fits/image01.fits",
                                            folder + "/fits/image03.fits"])

        self.pipeline.add_module(read)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("read7")

        assert len(warning) == 2

        for item in warning:
            assert item.message.args[0] == "Non-static attribute EXP_NO (=ESO DET EXP NO) not " \
                                           "found in the FITS header."

        data = self.pipeline.get_data("files")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010032245393723324, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)
class TestPsfSubtractionSdi:
    """Integration tests for PCA PSF subtraction with SDI processing types."""

    def setup_class(self) -> None:
        """Create an IFS test data set and a configuration file."""
        self.limit = 1e-5
        self.test_dir = os.path.dirname(__file__) + '/'

        create_ifs_data(self.test_dir+'science')
        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Delete the test data that was created for this class."""
        remove_test_data(self.test_dir, folders=['science'])

    def test_read_data(self) -> None:
        """Read the IFS data and attach wavelength and angle attributes."""
        module = FitsReadingModule(name_in='read',
                                   image_tag='science',
                                   input_dir=self.test_dir+'science',
                                   ifs_data=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('science')
        assert np.sum(data) == pytest.approx(749.8396528807368, rel=self.limit, abs=0.)
        assert data.shape == (3, 10, 21, 21)

        self.pipeline.set_attribute('science', 'WAVELENGTH', [1., 1.1, 1.2], static=False)
        self.pipeline.set_attribute('science', 'PARANG', np.linspace(0., 180., 10), static=False)

    def test_psf_subtraction_sdi(self) -> None:
        """Single-process PSF subtraction for several processing types."""
        processing_types = ['ADI', 'SDI+ADI', 'ADI+SDI']

        # one row per processing type: sums of mean/median/weighted/clipped residuals
        expected = [[-0.176152493909826, -0.7938155399702668,
                     19.552033067005578, -0.21617058715490922],
                    [-0.004568154679096975, -0.08621264803633322,
                     2.2901225325010888, -0.010269745733878437],
                    [0.008630501634061892, -0.05776205365084376,
                     -0.4285370289350482, 0.0058856438951644455]]

        shape_expc = [(2, 3, 21, 21), (2, 2, 3, 21, 21), (1, 1, 3, 21, 21)]

        pca_numbers = [range(1, 3), (range(1, 3), range(1, 3)), ([1], [1])]

        res_arr_tags = [None, None, 'res_arr_single_sdi_ADI+SDI']

        for i, p_type in enumerate(processing_types):
            module = PcaPsfSubtractionModule(pca_numbers=pca_numbers[i],
                                             name_in='pca_single_sdi_'+p_type,
                                             images_in_tag='science',
                                             reference_in_tag='science',
                                             res_mean_tag='res_mean_single_sdi_'+p_type,
                                             res_median_tag='res_median_single_sdi_'+p_type,
                                             res_weighted_tag='res_weighted_single_sdi_'+p_type,
                                             res_rot_mean_clip_tag='res_clip_single_sdi_'+p_type,
                                             res_arr_out_tag=res_arr_tags[i],
                                             basis_out_tag='basis_single_sdi_'+p_type,
                                             extra_rot=0.,
                                             subtract_mean=True,
                                             processing_type=p_type)

            self.pipeline.add_module(module)
            self.pipeline.run_module('pca_single_sdi_'+p_type)

            data = self.pipeline.get_data('res_mean_single_sdi_'+p_type)
            assert np.sum(data) == pytest.approx(expected[i][0], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_median_single_sdi_'+p_type)
            assert np.sum(data) == pytest.approx(expected[i][1], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_weighted_single_sdi_'+p_type)
            assert np.sum(data) == pytest.approx(expected[i][2], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_clip_single_sdi_'+p_type)
            # assert np.sum(data) == pytest.approx(expected[i][3], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            # data = self.pipeline.get_data('basis_single_sdi_'+p_type)
            # assert np.sum(data) == pytest.approx(-1.3886119555248766, rel=self.limit, abs=0.)
            # assert data.shape == (5, 30, 30)

    def test_psf_subtraction_sdi_multi(self) -> None:
        """Multiprocessing (CPU=4) PSF subtraction for SDI processing types."""
        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        processing_types = ['SDI', 'ADI+SDI']

        pca_numbers = [range(1, 3), (range(1, 3), range(1, 3))]

        expected = [[-0.0044942456603888695, 0.02613693149969979,
                     -0.15045543311096457, -0.008432530081399985],
                    [-0.0094093643053501, -0.08171546066331437,
                     0.560810054788774, -0.014527353460544753]]

        shape_expc = [(2, 3, 21, 21), (2, 2, 3, 21, 21)]

        for i, p_type in enumerate(processing_types):
            module = PcaPsfSubtractionModule(pca_numbers=pca_numbers[i],
                                             name_in='pca_multi_sdi_'+p_type,
                                             images_in_tag='science',
                                             reference_in_tag='science',
                                             res_mean_tag='res_mean_multi_sdi_'+p_type,
                                             res_median_tag='res_median_multi_sdi_'+p_type,
                                             res_weighted_tag='res_weighted_multi_sdi_'+p_type,
                                             res_rot_mean_clip_tag='res_clip_multi_sdi_'+p_type,
                                             res_arr_out_tag=None,
                                             basis_out_tag=None,
                                             extra_rot=0.,
                                             subtract_mean=True,
                                             processing_type=p_type)

            self.pipeline.add_module(module)
            self.pipeline.run_module('pca_multi_sdi_'+p_type)

            data = self.pipeline.get_data('res_mean_multi_sdi_'+p_type)
            assert np.sum(data) == pytest.approx(expected[i][0], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_median_multi_sdi_'+p_type)
            assert np.sum(data) == pytest.approx(expected[i][1], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_weighted_multi_sdi_'+p_type)
            assert np.sum(data) == pytest.approx(expected[i][2], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_clip_multi_sdi_'+p_type)
            # assert np.sum(data) == pytest.approx(expected[i][3], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]
class TestFluxAndPosition(object):
    """Integration tests for photometry, astrometry, and MCMC modules."""

    def setup_class(self):
        """Create stellar, PSF, and fake-planet (ADI) data sets."""
        self.test_dir = os.path.dirname(__file__) + "/"

        create_star_data(path=self.test_dir + "flux", npix_x=101, npix_y=101)

        create_star_data(path=self.test_dir + "psf",
                         npix_x=15,
                         npix_y=15,
                         x0=[7., 7., 7., 7.],
                         y0=[7., 7., 7., 7.],
                         ndit=1,
                         nframes=1,
                         noise=False)

        create_fake(path=self.test_dir + "adi",
                    ndit=[5, 5, 5, 5],
                    nframes=[5, 5, 5, 5],
                    exp_no=[1, 2, 3, 4],
                    npix=(15, 15),
                    fwhm=3.,
                    x0=[7., 7., 7., 7.],
                    y0=[7., 7., 7., 7.],
                    angles=[[0., 50.], [50., 100.], [100., 150.], [150., 200.]],
                    sep=5.5,
                    contrast=1.)

        create_config(self.test_dir + "PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Delete the test data that was created for this class."""
        remove_test_data(self.test_dir, folders=["flux", "adi", "psf"])

    def test_read_data(self):
        """Read the three test data sets into the database."""
        read = FitsReadingModule(name_in="read1",
                                 image_tag="read",
                                 input_dir=self.test_dir + "flux")

        self.pipeline.add_module(read)
        self.pipeline.run_module("read1")

        data = self.pipeline.get_data("read")
        assert np.allclose(data[0, 50, 50], 0.0986064357966972, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.827812356946396e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 101, 101)

        read = FitsReadingModule(name_in="read2",
                                 image_tag="adi",
                                 input_dir=self.test_dir + "adi")

        self.pipeline.add_module(read)
        self.pipeline.run_module("read2")

        data = self.pipeline.get_data("adi")
        assert np.allclose(data[0, 7, 7], 0.09823888178122618, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.008761678820997612, rtol=limit, atol=0.)
        assert data.shape == (20, 15, 15)

        read = FitsReadingModule(name_in="read3",
                                 image_tag="psf",
                                 input_dir=self.test_dir + "psf")

        self.pipeline.add_module(read)
        self.pipeline.run_module("read3")

        data = self.pipeline.get_data("psf")
        assert np.allclose(data[0, 7, 7], 0.09806026673451182, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444444429123135, rtol=limit, atol=0.)
        assert data.shape == (4, 15, 15)

    def test_aperture_photometry(self):
        """Aperture photometry at the image center."""
        photometry = AperturePhotometryModule(radius=0.1,
                                              position=None,
                                              name_in="photometry",
                                              image_in_tag="read",
                                              phot_out_tag="photometry")

        self.pipeline.add_module(photometry)
        self.pipeline.run_module("photometry")

        data = self.pipeline.get_data("photometry")
        assert np.allclose(data[0][0], 0.9702137183213615, rtol=limit, atol=0.)
        assert np.allclose(data[39][0], 0.9691512171281103, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.9691752104364761, rtol=limit, atol=0.)
        assert data.shape == (40, 1)

    def test_angle_interpolation(self):
        """Interpolate the parallactic angles of the 'read' data set."""
        angle = AngleInterpolationModule(name_in="angle",
                                         data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle")

        data = self.pipeline.get_data("header_read/PARANG")
        assert data[5] == 2.7777777777777777
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_fake_planet(self):
        """Inject an artificial planet at 0.5 arcsec, 90 deg."""
        fake = FakePlanetModule(position=(0.5, 90.),
                                magnitude=6.,
                                psf_scaling=1.,
                                interpolation="spline",
                                name_in="fake",
                                image_in_tag="read",
                                psf_in_tag="read",
                                image_out_tag="fake")

        self.pipeline.add_module(fake)
        self.pipeline.run_module("fake")

        data = self.pipeline.get_data("fake")
        assert np.allclose(data[0, 50, 50], 0.09860622347589054, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.867026482551375e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 101, 101)

    def test_psf_subtraction(self):
        """PCA PSF subtraction of the data with the fake planet."""
        pca = PcaPsfSubtractionModule(pca_numbers=(2, ),
                                      name_in="pca",
                                      images_in_tag="fake",
                                      reference_in_tag="fake",
                                      res_mean_tag="res_mean",
                                      res_median_tag=None,
                                      res_arr_out_tag=None,
                                      res_rot_mean_clip_tag=None,
                                      extra_rot=0.)

        self.pipeline.add_module(pca)
        self.pipeline.run_module("pca")

        data = self.pipeline.get_data("res_mean")
        assert np.allclose(data[0, 49, 31], 4.8963214463463886e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.8409659677297164e-08, rtol=limit, atol=0.)
        assert data.shape == (1, 101, 101)

    def test_false_positive(self):
        """SNR and FPF of the injected planet in the residuals."""
        false = FalsePositiveModule(position=(31., 49.),
                                    aperture=0.1,
                                    ignore=True,
                                    name_in="false",
                                    image_in_tag="res_mean",
                                    snr_out_tag="snr_fpf")

        self.pipeline.add_module(false)
        self.pipeline.run_module("false")

        data = self.pipeline.get_data("snr_fpf")
        assert np.allclose(data[0, 2], 0.5280553948214145, rtol=limit, atol=0.)
        assert np.allclose(data[0, 3], 94.39870535499551, rtol=limit, atol=0.)
        assert np.allclose(data[0, 4], 8.542166952478182, rtol=limit, atol=0.)
        assert np.allclose(data[0, 5], 9.54772666372783e-07, rtol=limit, atol=0.)

    def test_simplex_minimization(self):
        """Retrieve flux and position with a simplex minimization."""
        simplex = SimplexMinimizationModule(position=(31., 49.),
                                            magnitude=6.,
                                            psf_scaling=-1.,
                                            name_in="simplex",
                                            image_in_tag="fake",
                                            psf_in_tag="read",
                                            res_out_tag="simplex_res",
                                            flux_position_tag="flux_position",
                                            merit="hessian",
                                            aperture=0.1,
                                            sigma=0.,
                                            tolerance=0.1,
                                            pca_number=1,
                                            cent_size=0.1,
                                            edge_size=None,
                                            extra_rot=0.)

        self.pipeline.add_module(simplex)
        self.pipeline.run_module("simplex")

        data = self.pipeline.get_data("simplex_res")
        assert np.allclose(data[0, 50, 31], 0.00020085220731657478, rtol=limit, atol=0.)
        assert np.allclose(data[65, 50, 31], 2.5035345163849688e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.416893585673146e-09, rtol=limit, atol=0.)
        assert data.shape == (66, 101, 101)

        data = self.pipeline.get_data("flux_position")
        assert np.allclose(data[65, 0], 32.14539423594633, rtol=limit, atol=0.)
        assert np.allclose(data[65, 1], 50.40994810153265, rtol=limit, atol=0.)
        assert np.allclose(data[65, 2], 0.4955803200991986, rtol=limit, atol=0.)
        assert np.allclose(data[65, 3], 90.28110395762462, rtol=limit, atol=0.)
        assert np.allclose(data[65, 4], 5.744096115502183, rtol=limit, atol=0.)
        assert data.shape == (66, 6)

    def test_mcmc_sampling_poisson(self):
        """MCMC sampling with Poisson variance on the scaled ADI data."""
        self.pipeline.set_attribute("adi", "PARANG", np.arange(0., 200., 10.), static=False)

        scale = ScaleImagesModule(scaling=(None, None, 100.),
                                  pixscale=False,
                                  name_in="scale1",
                                  image_in_tag="adi",
                                  image_out_tag="adi_scale")

        self.pipeline.add_module(scale)
        self.pipeline.run_module("scale1")

        data = self.pipeline.get_data("adi_scale")
        assert np.allclose(data[0, 7, 7], 9.82388817812263, rtol=limit, atol=0.)
        assert data.shape == (20, 15, 15)

        scale = ScaleImagesModule(scaling=(None, None, 100.),
                                  pixscale=False,
                                  name_in="scale2",
                                  image_in_tag="psf",
                                  image_out_tag="psf_scale")

        self.pipeline.add_module(scale)
        self.pipeline.run_module("scale2")

        data = self.pipeline.get_data("psf_scale")
        assert np.allclose(data[0, 7, 7], 9.806026673451198, rtol=limit, atol=0.)
        assert data.shape == (4, 15, 15)

        avg_psf = DerotateAndStackModule(name_in="take_psf_avg",
                                         image_in_tag="psf_scale",
                                         image_out_tag="psf_avg",
                                         derotate=False,
                                         stack="mean")

        self.pipeline.add_module(avg_psf)
        self.pipeline.run_module("take_psf_avg")

        data = self.pipeline.get_data("psf_avg")
        assert data.shape == (15, 15)

        mcmc = MCMCsamplingModule(param=(0.1485, 0., 0.),
                                  bounds=((0.1, 0.25), (-5., 5.), (-0.5, 0.5)),
                                  name_in="mcmc",
                                  image_in_tag="adi_scale",
                                  psf_in_tag="psf_avg",
                                  chain_out_tag="mcmc",
                                  nwalkers=50,
                                  nsteps=150,
                                  psf_scaling=-1.,
                                  pca_number=1,
                                  aperture=0.1,
                                  mask=None,
                                  extra_rot=0.,
                                  scale=2.,
                                  sigma=(1e-3, 1e-1, 1e-2),
                                  prior="flat",
                                  variance="poisson")

        self.pipeline.add_module(mcmc)
        self.pipeline.run_module("mcmc")

        single = self.pipeline.get_data("mcmc")
        # discard the first 20 steps as burn-in and flatten the chains
        single = single[:, 20:, :].reshape((-1, 3))
        assert np.allclose(np.median(single[:, 0]), 0.148, rtol=0., atol=0.01)
        assert np.allclose(np.median(single[:, 1]), 0., rtol=0., atol=0.2)
        assert np.allclose(np.median(single[:, 2]), 0., rtol=0., atol=0.1)

    def test_mcmc_sampling_gaussian(self):
        """MCMC sampling with Gaussian variance, using 4 CPUs."""
        # Use a context manager so the HDF5 database is closed again
        # (the previous code left the file handle open).
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as database:
            database['config'].attrs['CPU'] = 4

        mcmc = MCMCsamplingModule(param=(0.1485, 0., 0.),
                                  bounds=((0.1, 0.25), (-5., 5.), (-0.5, 0.5)),
                                  name_in="mcmc_gaussian",
                                  image_in_tag="adi_scale",
                                  psf_in_tag="psf_avg",
                                  chain_out_tag="mcmc_gaussian",
                                  nwalkers=50,
                                  nsteps=150,
                                  psf_scaling=-1.,
                                  pca_number=1,
                                  aperture=0.1,
                                  mask=None,
                                  extra_rot=0.,
                                  scale=2.,
                                  sigma=(1e-3, 1e-1, 1e-2),
                                  prior="flat",
                                  variance="gaussian")

        self.pipeline.add_module(mcmc)
        self.pipeline.run_module("mcmc_gaussian")

        single = self.pipeline.get_data("mcmc_gaussian")
        # discard the first 20 steps as burn-in and flatten the chains
        single = single[:, 20:, :].reshape((-1, 3))
        assert np.allclose(np.median(single[:, 0]), 0.148, rtol=0., atol=0.01)
        assert np.allclose(np.median(single[:, 1]), 0., rtol=0., atol=0.2)
        assert np.allclose(np.median(single[:, 2]), 0., rtol=0., atol=0.1)
class TestFluxPosition:
    """
    Regression tests for flux/position modules (photometry, fake planet injection, PSF
    subtraction, SNR/FPF, simplex minimization, and MCMC sampling).

    The test methods are order-dependent: they share one pipeline database, and later
    tests consume tags written by earlier ones.
    """

    def setup_class(self) -> None:
        """Create the synthetic data sets, the configuration file, and the pipeline."""

        # Relative tolerance for all regression values, consistent with the
        # other test classes in this file.
        self.limit = 1e-10

        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir+'flux', npix_x=101, npix_y=101)
        create_star_data(path=self.test_dir+'ref', npix_x=101, npix_y=101)

        # Small, noiseless PSF cube used as the PSF template.
        create_star_data(path=self.test_dir+'psf',
                         npix_x=15,
                         npix_y=15,
                         x0=[7., 7., 7., 7.],
                         y0=[7., 7., 7., 7.],
                         ndit=1,
                         nframes=1,
                         noise=False)

        # ADI sequence with an injected companion at 5.5 pixel separation.
        create_fake(path=self.test_dir+'adi',
                    ndit=[5, 5, 5, 5],
                    nframes=[5, 5, 5, 5],
                    exp_no=[1, 2, 3, 4],
                    npix=(15, 15),
                    fwhm=3.,
                    x0=[7., 7., 7., 7.],
                    y0=[7., 7., 7., 7.],
                    angles=[[0., 50.], [50., 100.], [100., 150.], [150., 200.]],
                    sep=5.5,
                    contrast=1.)

        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the temporary data folders and pipeline database."""

        remove_test_data(self.test_dir, folders=['flux', 'adi', 'psf', 'ref'])

    def test_read_data(self) -> None:
        """Read the four FITS data sets into the database and pin their statistics."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='read',
                                   input_dir=self.test_dir+'flux')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('read')
        assert np.allclose(data[0, 50, 50], 0.0986064357966972, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 9.827812356946396e-05, rtol=self.limit, atol=0.)
        assert data.shape == (40, 101, 101)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='adi',
                                   input_dir=self.test_dir+'adi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('adi')
        assert np.allclose(data[0, 7, 7], 0.09823888178122618, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 0.008761678820997612, rtol=self.limit, atol=0.)
        assert data.shape == (20, 15, 15)

        module = FitsReadingModule(name_in='read3',
                                   image_tag='psf',
                                   input_dir=self.test_dir+'psf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read3')

        data = self.pipeline.get_data('psf')
        assert np.allclose(data[0, 7, 7], 0.09806026673451182, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444444429123135, rtol=self.limit, atol=0.)
        assert data.shape == (4, 15, 15)

        module = FitsReadingModule(name_in='read4',
                                   image_tag='ref',
                                   input_dir=self.test_dir+'ref')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read4')

        data = self.pipeline.get_data('ref')
        assert np.allclose(data[0, 50, 50], 0.0986064357966972, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 9.827812356946396e-05, rtol=self.limit, atol=0.)
        assert data.shape == (40, 101, 101)

    def test_aperture_photometry(self) -> None:
        """Run aperture photometry with 1 and 4 CPUs and compare the output shapes."""

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = AperturePhotometryModule(radius=0.1,
                                          position=None,
                                          name_in='photometry',
                                          image_in_tag='read',
                                          phot_out_tag='photometry')

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry')

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = AperturePhotometryModule(radius=0.1,
                                          position=None,
                                          name_in='photometry_multi',
                                          image_in_tag='read',
                                          phot_out_tag='photometry_multi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry_multi')

        data = self.pipeline.get_data('photometry')
        assert np.allclose(data[0][0], 0.9853286992326858, rtol=self.limit, atol=0.)
        assert np.allclose(data[39][0], 0.9835251375574492, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 0.9836439188900222, rtol=self.limit, atol=0.)
        assert data.shape == (40, 1)

        data_multi = self.pipeline.get_data('photometry_multi')
        assert data.shape == data_multi.shape

        # Does not pass on Travis CI
        # assert np.allclose(data, data_multi, rtol=self.limit, atol=0.)

    def test_angle_interpolation(self) -> None:
        """Interpolate the parallactic angles of the 'read' data set."""

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = AngleInterpolationModule(name_in='angle',
                                          data_tag='read')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        data = self.pipeline.get_data('header_read/PARANG')
        assert data[5] == 2.7777777777777777
        assert np.allclose(np.mean(data), 10.0, rtol=self.limit, atol=0.)
        assert data.shape == (40, )

    def test_fake_planet(self) -> None:
        """Inject a fake planet at 0.5 arcsec, 90 deg, and a contrast of 6 mag."""

        module = FakePlanetModule(position=(0.5, 90.),
                                  magnitude=6.,
                                  psf_scaling=1.,
                                  interpolation='spline',
                                  name_in='fake',
                                  image_in_tag='read',
                                  psf_in_tag='read',
                                  image_out_tag='fake')

        self.pipeline.add_module(module)
        self.pipeline.run_module('fake')

        data = self.pipeline.get_data('fake')
        assert np.allclose(data[0, 50, 50], 0.09860622347589054, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 9.867026482551375e-05, rtol=self.limit, atol=0.)
        assert data.shape == (40, 101, 101)

    def test_psf_subtraction(self) -> None:
        """PSF subtraction with 2 principal components on the fake-planet data."""

        module = PcaPsfSubtractionModule(pca_numbers=[2, ],
                                         name_in='pca',
                                         images_in_tag='fake',
                                         reference_in_tag='fake',
                                         res_mean_tag='res_mean',
                                         extra_rot=0.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca')

        data = self.pipeline.get_data('res_mean')
        assert np.allclose(data[0, 49, 31], 4.8963214463463886e-05, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 1.8409659677297164e-08, rtol=self.limit, atol=0.)
        assert data.shape == (1, 101, 101)

    def test_false_positive(self) -> None:
        """SNR and FPF at the position of the injected planet in the residuals."""

        module = FalsePositiveModule(position=(31., 49.),
                                     aperture=0.1,
                                     ignore=True,
                                     name_in='false',
                                     image_in_tag='res_mean',
                                     snr_out_tag='snr_fpf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('false')

        data = self.pipeline.get_data('snr_fpf')
        assert np.allclose(data[0, 0], 31.0, rtol=self.limit, atol=0.)
        assert np.allclose(data[0, 1], 49.0, rtol=self.limit, atol=0.)
        assert np.allclose(data[0, 2], 0.513710034941892, rtol=self.limit, atol=0.)
        assert np.allclose(data[0, 3], 93.01278750418334, rtol=self.limit, atol=0.)
        assert np.allclose(data[0, 4], 7.333740467578795, rtol=self.limit, atol=0.)
        assert np.allclose(data[0, 5], 4.5257622875993775e-06, rtol=self.limit, atol=0.)

    def test_simplex_minimization_hessian(self) -> None:
        """Simplex minimization of the Hessian merit function without reference data."""

        module = SimplexMinimizationModule(name_in='simplex1',
                                           image_in_tag='fake',
                                           psf_in_tag='read',
                                           res_out_tag='simplex_res',
                                           flux_position_tag='flux_position',
                                           position=(31, 49),
                                           magnitude=6.,
                                           psf_scaling=-1.,
                                           merit='hessian',
                                           aperture=0.1,
                                           sigma=0.,
                                           tolerance=0.1,
                                           pca_number=1,
                                           cent_size=0.1,
                                           edge_size=None,
                                           extra_rot=0.,
                                           reference_in_tag=None,
                                           residuals='median')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simplex1')

        data = self.pipeline.get_data('simplex_res')
        assert np.allclose(data[0, 50, 31], 0.00013866444247059368, rtol=self.limit, atol=0.)
        assert np.allclose(data[35, 50, 31], 3.482226033356121e-05, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 3.125137239656066e-07, rtol=self.limit, atol=0.)
        assert data.shape == (36, 101, 101)

        data = self.pipeline.get_data('flux_position')
        assert np.allclose(data[35, 0], 31.420497036174158, rtol=self.limit, atol=0.)
        assert np.allclose(data[35, 1], 50.03219573166646, rtol=self.limit, atol=0.)
        assert np.allclose(data[35, 2], 0.5016473331983896, rtol=self.limit, atol=0.)
        assert np.allclose(data[35, 3], 89.90071436787039, rtol=self.limit, atol=0.)
        assert np.allclose(data[35, 4], 6.012537296489204, rtol=self.limit, atol=0.)
        assert data.shape == (36, 6)

    def test_simplex_minimization_reference(self) -> None:
        """Simplex minimization of the Poisson merit function with a reference data set."""

        module = SimplexMinimizationModule(name_in='simplex2',
                                           image_in_tag='fake',
                                           psf_in_tag='read',
                                           res_out_tag='simplex_res_ref',
                                           flux_position_tag='flux_position_ref',
                                           position=(31, 49),
                                           magnitude=6.,
                                           psf_scaling=-1.,
                                           merit='poisson',
                                           aperture=0.1,
                                           sigma=0.,
                                           tolerance=0.1,
                                           pca_number=1,
                                           cent_size=0.1,
                                           edge_size=None,
                                           extra_rot=0.,
                                           reference_in_tag='ref',
                                           residuals='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simplex2')

        data = self.pipeline.get_data('simplex_res_ref')
        assert np.allclose(data[0, 50, 31], 0.00014188043631450017, rtol=self.limit, atol=0.)
        assert np.allclose(data[43, 50, 31], 7.217091961625108e-05, rtol=self.limit, atol=0.)
        assert np.allclose(np.mean(data), 1.3228426986034557e-06, rtol=self.limit, atol=0.)
        assert data.shape == (44, 101, 101)

        data = self.pipeline.get_data('flux_position_ref')
        assert np.allclose(data[43, 0], 31.519027018276986, rtol=self.limit, atol=0.)
        assert np.allclose(data[43, 1], 49.85541939005469, rtol=self.limit, atol=0.)
        assert np.allclose(data[43, 2], 0.4990015399214498, rtol=self.limit, atol=0.)
        assert np.allclose(data[43, 3], 90.44822801080721, rtol=self.limit, atol=0.)
        assert np.allclose(data[43, 4], 5.9953709695084605, rtol=self.limit, atol=0.)
        assert data.shape == (44, 6)

    def test_mcmc_sampling(self) -> None:
        """MCMC sampling on the ADI data; medians of the posterior are only loosely
        constrained because the chain is short and stochastic."""

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        self.pipeline.set_attribute('adi', 'PARANG', np.arange(0., 200., 10.), static=False)

        module = DerotateAndStackModule(name_in='stack',
                                        image_in_tag='psf',
                                        image_out_tag='psf_stack',
                                        derotate=False,
                                        stack='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('stack')

        data = self.pipeline.get_data('psf_stack')
        assert data.shape == (1, 15, 15)

        module = MCMCsamplingModule(name_in='mcmc',
                                    image_in_tag='adi',
                                    psf_in_tag='psf_stack',
                                    chain_out_tag='mcmc',
                                    param=(0.15, 0., 1.),
                                    bounds=((0.1, 0.2), (-2., 2.), (-1., 2.)),
                                    nwalkers=50,
                                    nsteps=150,
                                    psf_scaling=-1.,
                                    pca_number=1,
                                    aperture=(7, 13, 0.1),
                                    mask=None,
                                    extra_rot=0.,
                                    merit='gaussian',
                                    residuals='median',
                                    scale=2.,
                                    sigma=(1e-3, 1e-1, 1e-2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('mcmc')

        # Discard the first 50 steps as burn-in before flattening the chain.
        data = self.pipeline.get_data('mcmc')
        data = data[50:, :, :].reshape((-1, 3))
        assert np.allclose(np.median(data[:, 0]), 0.15, rtol=0., atol=0.1)
        assert np.allclose(np.median(data[:, 1]), 0., rtol=0., atol=1.0)
        assert np.allclose(np.median(data[:, 2]), 0.0, rtol=0., atol=1.)

        attr = self.pipeline.get_attribute('mcmc', 'ACCEPTANCE', static=True)
        assert np.allclose(attr, 0.3, rtol=0., atol=0.2)
class TestExtract:
    """
    Regression tests for StarExtractionModule and ExtractBinaryModule.

    The test methods share one pipeline database and run in order: the data sets
    read by ``test_read_data`` are reused by all later tests.
    """

    def setup_class(self) -> None:
        """Create the synthetic data sets, the configuration file, and the pipeline."""

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir+'star')
        create_fake_data(self.test_dir+'binary')
        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the temporary data folders and pipeline database."""

        remove_test_data(path=self.test_dir, folders=['star', 'binary'])

    def test_read_data(self) -> None:
        """Read the star and binary data sets and attach parallactic angles to 'binary'."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='star',
                                   input_dir=self.test_dir+'star',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('star')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='binary',
                                   input_dir=self.test_dir+'binary',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('binary')
        assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        self.pipeline.set_attribute('binary', 'PARANG', -1.*np.linspace(0., 180., 10), static=False)

    def test_extract_position_none(self) -> None:
        """Extract around the brightest pixel; the 'index' dataset stays empty, so
        attribute storage warns three times."""

        module = StarExtractionModule(name_in='extract1',
                                      image_in_tag='star',
                                      image_out_tag='extract1',
                                      index_out_tag='index',
                                      image_size=0.2,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract1')

        assert len(warning) == 3

        assert warning[0].message.args[0] == 'Can not store the attribute \'INSTRUMENT\' because ' \
                                             'the dataset \'index\' does not exist.'

        assert warning[1].message.args[0] == 'Can not store the attribute \'PIXSCALE\' because ' \
                                             'the dataset \'index\' does not exist.'

        assert warning[2].message.args[0] == 'Can not store the attribute \'History: ' \
                                             'StarExtractionModule\' because the dataset ' \
                                             '\'index\' does not exist.'

        data = self.pipeline.get_data('extract1')
        assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.)
        assert data.shape == (10, 9, 9)

    def test_extract_center_none(self) -> None:
        """Extract with a search position centered on the image ((None, None) center)."""

        module = StarExtractionModule(name_in='extract2',
                                      image_in_tag='star',
                                      image_out_tag='extract2',
                                      index_out_tag='index',
                                      image_size=0.2,
                                      fwhm_star=0.1,
                                      position=(None, None, 0.2))

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract2')

        assert len(warning) == 3

        assert warning[0].message.args[0] == 'Can not store the attribute \'INSTRUMENT\' because ' \
                                             'the dataset \'index\' does not exist.'

        assert warning[1].message.args[0] == 'Can not store the attribute \'PIXSCALE\' because ' \
                                             'the dataset \'index\' does not exist.'

        assert warning[2].message.args[0] == 'Can not store the attribute \'History: ' \
                                             'StarExtractionModule\' because the dataset ' \
                                             '\'index\' does not exist.'

        data = self.pipeline.get_data('extract2')
        assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.)
        assert data.shape == (10, 9, 9)

    def test_extract_position(self) -> None:
        """Extract with an explicit pixel position and no index output."""

        module = StarExtractionModule(name_in='extract7',
                                      image_in_tag='star',
                                      image_out_tag='extract7',
                                      index_out_tag=None,
                                      image_size=0.2,
                                      fwhm_star=0.1,
                                      position=(5, 5, 0.2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract7')

        data = self.pipeline.get_data('extract7')
        assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.)
        assert data.shape == (10, 9, 9)

    def test_extract_too_large(self) -> None:
        """A crop that does not fit around the brightest pixel falls back to the center."""

        module = StarExtractionModule(name_in='extract3',
                                      image_in_tag='star',
                                      image_out_tag='extract3',
                                      index_out_tag=None,
                                      image_size=0.2,
                                      fwhm_star=0.1,
                                      position=(2, 2, 0.05))

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract3')

        assert len(warning) == 10

        assert warning[0].message.args[0] == 'Chosen image size is too large to crop the image ' \
                                             'around the brightest pixel (image index = 0, ' \
                                             'pixel [x, y] = [2, 2]). Using the center of ' \
                                             'the image instead.'

        data = self.pipeline.get_data('extract3')
        assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.)
        assert data.shape == (10, 9, 9)

    def test_star_extract_cpu(self) -> None:
        """With CPU = 4 the index output port is disabled and a warning is raised."""

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = StarExtractionModule(name_in='extract4',
                                      image_in_tag='star',
                                      image_out_tag='extract4',
                                      index_out_tag='index',
                                      image_size=0.2,
                                      fwhm_star=0.1,
                                      position=(2, 2, 0.05))

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract4')

        assert len(warning) == 2

        assert warning[0].message.args[0] == 'The \'index_out_port\' can only be used if ' \
                                             'CPU = 1. No data will be stored to this output port.'

        assert warning[1].message.args[0] == 'Chosen image size is too large to crop the image ' \
                                             'around the brightest pixel (image index = 0, ' \
                                             'pixel [x, y] = [2, 2]). Using the center of the ' \
                                             'image instead.'

    def test_extract_binary(self) -> None:
        """Extract the secondary of the binary while it rotates with PARANG."""

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = ExtractBinaryModule(pos_center=(10., 10.),
                                     pos_binary=(10., 16.),
                                     name_in='extract5',
                                     image_in_tag='binary',
                                     image_out_tag='extract5',
                                     image_size=0.15,
                                     search_size=0.07,
                                     filter_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract5')

        data = self.pipeline.get_data('extract5')
        assert np.sum(data) == pytest.approx(1.3419098759577548, rel=self.limit, abs=0.)
        assert data.shape == (10, 7, 7)

    def test_extract_binary_filter(self) -> None:
        """Same extraction but with Gaussian smoothing before the peak search."""

        module = ExtractBinaryModule(pos_center=(10., 10.),
                                     pos_binary=(10., 16.),
                                     name_in='extract6',
                                     image_in_tag='binary',
                                     image_out_tag='extract6',
                                     image_size=0.15,
                                     search_size=0.07,
                                     filter_size=0.05)

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract6')

        data = self.pipeline.get_data('extract6')
        assert np.sum(data) == pytest.approx(1.3789593661036972, rel=self.limit, abs=0.)
        assert data.shape == (10, 7, 7)
class TestStackSubset:
    """
    Regression tests for stacking and subset modules (StackAndSubsetModule,
    StackCubesModule, DerotateAndStackModule, CombineTagsModule).

    The test methods share one pipeline database and run in order.
    """

    def setup_class(self) -> None:
        """Create the synthetic data sets, the configuration file, and the pipeline."""

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_ifs_data(self.test_dir+'data_ifs')
        create_star_data(self.test_dir+'data')
        create_star_data(self.test_dir+'extra')
        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the temporary data folders and pipeline database."""

        remove_test_data(self.test_dir, folders=['data_ifs', 'extra', 'data'])

    def test_read_data(self) -> None:
        """Read the regular and IFS data sets and attach parallactic angles to the
        IFS data."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='images',
                                   input_dir=self.test_dir+'data',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('images')
        assert np.mean(data) == pytest.approx(0.08722544528764692, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='extra',
                                   input_dir=self.test_dir+'extra',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        # 'data' and 'extra' are generated identically so the tags must agree.
        extra = self.pipeline.get_data('extra')
        assert data == pytest.approx(extra, rel=self.limit, abs=0.)

        module = FitsReadingModule(name_in='read_ifs',
                                   image_tag='images_ifs',
                                   input_dir=self.test_dir+'data_ifs',
                                   overwrite=True,
                                   check=True,
                                   ifs_data=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read_ifs')

        self.pipeline.set_attribute('images_ifs', 'PARANG', np.linspace(0., 180., 10), static=False)

        data = self.pipeline.get_data('images_ifs')
        assert np.sum(data) == pytest.approx(749.8396528807369, rel=self.limit, abs=0.)
        assert data.shape == (3, 10, 21, 21)

    def test_stack_and_subset(self) -> None:
        """Mean-stack every 2 images and randomly select 4 of the stacked frames."""

        self.pipeline.set_attribute('images', 'PARANG', np.arange(10.), static=False)

        module = StackAndSubsetModule(name_in='stack1',
                                      image_in_tag='images',
                                      image_out_tag='stack1',
                                      random=4,
                                      stacking=2,
                                      combine='mean',
                                      max_rotation=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('stack1')

        data = self.pipeline.get_data('stack1')
        assert np.mean(data) == pytest.approx(0.08758276283743936, rel=self.limit, abs=0.)
        assert data.shape == (4, 11, 11)

        data = self.pipeline.get_data('header_stack1/INDEX')
        assert data == pytest.approx(np.arange(4), rel=self.limit, abs=0.)
        assert data.shape == (4, )

        data = self.pipeline.get_data('header_stack1/PARANG')
        assert data == pytest.approx([0.5, 2.5, 6.5, 8.5], rel=self.limit, abs=0.)
        assert data.shape == (4, )

    def test_stack_max_rotation(self) -> None:
        """Median-stack with a maximum field rotation of 1 deg per stack."""

        angles = np.arange(10.)
        angles[1:6] = 3.
        angles[9] = 50.

        self.pipeline.set_attribute('images', 'PARANG', angles, static=False)

        module = StackAndSubsetModule(name_in='stack2',
                                      image_in_tag='images',
                                      image_out_tag='stack2',
                                      random=None,
                                      stacking=2,
                                      combine='median',
                                      max_rotation=1.)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('stack2')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Testing of util.module.stack_angles has been ' \
                                             'limited, please use carefully.'

        data = self.pipeline.get_data('stack2')
        assert np.mean(data) == pytest.approx(0.08580759396987508, rel=self.limit, abs=0.)
        assert data.shape == (7, 11, 11)

        data = self.pipeline.get_data('header_stack2/INDEX')
        assert data == pytest.approx(np.arange(7), rel=self.limit, abs=0.)
        assert data.shape == (7, )

        data = self.pipeline.get_data('header_stack2/PARANG')
        assert data.shape == (7, )

        # Restore the angles that were modified at the start of this test.
        self.pipeline.set_attribute('images', 'PARANG', np.arange(10.), static=False)

    def test_stack_cube(self) -> None:
        """Collapse each cube of the 'images' data set with the mean."""

        module = StackCubesModule(name_in='stackcube',
                                  image_in_tag='images',
                                  image_out_tag='mean',
                                  combine='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('stackcube')

        data = self.pipeline.get_data('mean')
        assert np.mean(data) == pytest.approx(0.08722544528764689, rel=self.limit, abs=0.)
        assert data.shape == (2, 11, 11)

        attribute = self.pipeline.get_attribute('mean', 'INDEX', static=False)
        assert np.mean(attribute) == pytest.approx(0.5, rel=self.limit, abs=0.)
        assert attribute.shape == (2, )

        attribute = self.pipeline.get_attribute('mean', 'NFRAMES', static=False)
        assert np.mean(attribute) == pytest.approx(1, rel=self.limit, abs=0.)
        assert attribute.shape == (2, )

    def test_derotate_and_stack(self) -> None:
        """Derotate/stack regular and IFS data along different dimensions."""

        module = DerotateAndStackModule(name_in='derotate1',
                                        image_in_tag='images',
                                        image_out_tag='derotate1',
                                        derotate=True,
                                        stack='mean',
                                        extra_rot=10.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('derotate1')

        data = self.pipeline.get_data('derotate1')
        assert np.mean(data) == pytest.approx(0.08709860116308817, rel=self.limit, abs=0.)
        assert data.shape == (1, 11, 11)

        module = DerotateAndStackModule(name_in='derotate2',
                                        image_in_tag='images',
                                        image_out_tag='derotate2',
                                        derotate=False,
                                        stack='median',
                                        extra_rot=0.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('derotate2')

        data = self.pipeline.get_data('derotate2')
        assert np.mean(data) == pytest.approx(0.0861160094566323, rel=self.limit, abs=0.)
        assert data.shape == (1, 11, 11)

        module = DerotateAndStackModule(name_in='derotate_ifs1',
                                        image_in_tag='images_ifs',
                                        image_out_tag='derotate_ifs1',
                                        derotate=True,
                                        stack='mean',
                                        extra_rot=0.,
                                        dimension='time')

        self.pipeline.add_module(module)
        self.pipeline.run_module('derotate_ifs1')

        data = self.pipeline.get_data('derotate_ifs1')
        assert np.mean(data) == pytest.approx(0.1884438996655355, rel=self.limit, abs=0.)
        assert data.shape == (3, 1, 21, 21)

        module = DerotateAndStackModule(name_in='derotate_ifs2',
                                        image_in_tag='images_ifs',
                                        image_out_tag='derotate_ifs2',
                                        derotate=False,
                                        stack='median',
                                        extra_rot=0.,
                                        dimension='wavelength')

        self.pipeline.add_module(module)
        self.pipeline.run_module('derotate_ifs2')

        data = self.pipeline.get_data('derotate_ifs2')
        assert np.mean(data) == pytest.approx(0.055939644983170146, rel=self.limit, abs=0.)
        assert data.shape == (1, 10, 21, 21)

        module = DerotateAndStackModule(name_in='derotate_ifs3',
                                        image_in_tag='images_ifs',
                                        image_out_tag='derotate_ifs3',
                                        derotate=True,
                                        stack=None,
                                        extra_rot=0.,
                                        dimension='wavelength')

        self.pipeline.add_module(module)
        self.pipeline.run_module('derotate_ifs3')

        data = self.pipeline.get_data('derotate_ifs3')
        assert np.mean(data) == pytest.approx(0.05653316989966066, rel=self.limit, abs=0.)
        assert data.shape == (3, 10, 21, 21)

    def test_combine_tags(self) -> None:
        """Combine two tags with and without attribute checking and index re-init."""

        module = CombineTagsModule(image_in_tags=['images', 'extra'],
                                   check_attr=True,
                                   index_init=False,
                                   name_in='combine1',
                                   image_out_tag='combine1')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('combine1')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'The non-static keyword FILES is already used but ' \
                                             'with different values. It is advisable to only ' \
                                             'combine tags that descend from the same data set.'

        data = self.pipeline.get_data('combine1')
        assert np.mean(data) == pytest.approx(0.0872254452876469, rel=self.limit, abs=0.)
        assert data.shape == (20, 11, 11)

        data = self.pipeline.get_data('header_combine1/INDEX')
        assert data[19] == 9
        assert data.shape == (20, )

        module = CombineTagsModule(image_in_tags=['images', 'extra'],
                                   check_attr=False,
                                   index_init=True,
                                   name_in='combine2',
                                   image_out_tag='combine2')

        self.pipeline.add_module(module)
        self.pipeline.run_module('combine2')

        data = self.pipeline.get_data('combine1')
        extra = self.pipeline.get_data('combine2')
        assert data == pytest.approx(extra, rel=self.limit, abs=0.)

        # With index_init=True the INDEX attribute runs continuously (0..19)
        # instead of restarting for the second tag.
        data = self.pipeline.get_data('header_combine2/INDEX')
        assert data[19] == 19
        assert data.shape == (20, )
class TestPsfPreparation:
    """
    Regression tests for PSF preparation modules (angle interpolation/calculation,
    parallactic-angle sorting, PSFpreparationModule, and SDIpreparationModule).

    The test methods share one pipeline database and are order-dependent: several
    tests overwrite the non-static attributes (PARANG, INDEX, RA, DEC, ...) that
    later tests rely on.
    """

    def setup_class(self) -> None:
        """Create the synthetic data sets, the configuration file, and the pipeline."""

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir+'prep')
        create_ifs_data(self.test_dir+'prep_ifs')
        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the temporary data folders and pipeline database."""

        remove_test_data(self.test_dir, folders=['prep', 'prep_ifs'])

    def test_read_data(self) -> None:
        """Read the regular and IFS data sets and pin their summed flux and shape."""

        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir+'prep')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        module = FitsReadingModule(name_in='read_ifs',
                                   image_tag='read_ifs',
                                   input_dir=self.test_dir+'prep_ifs',
                                   ifs_data=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read_ifs')

        data = self.pipeline.get_data('read_ifs')
        assert np.sum(data) == pytest.approx(749.8396528807369, rel=self.limit, abs=0.)
        assert data.shape == (3, 10, 21, 21)

    def test_angle_interpolation(self) -> None:
        """Interpolate parallactic angles for the 'read' data set."""

        module = AngleInterpolationModule(name_in='angle1',
                                          data_tag='read')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle1')

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.sum(data) == pytest.approx(900., rel=self.limit, abs=0.)
        assert data.shape == (10, )

    def test_angle_calculation(self) -> None:
        """Calculate parallactic angles for NACO, SPHERE/IRDIS, and SPHERE/IFS.

        The SPHERE branches intentionally use the regular RA/DEC keywords, which
        should trigger warnings recommending the 'ESO INS4 DROT2' header keywords.
        """

        self.pipeline.set_attribute('read', 'LATITUDE', -25.)
        self.pipeline.set_attribute('read', 'LONGITUDE', -70.)
        self.pipeline.set_attribute('read', 'DIT', 1.)

        self.pipeline.set_attribute('read', 'RA', (90., 90., 90., 90.), static=False)
        self.pipeline.set_attribute('read', 'DEC', (-51., -51., -51., -51.), static=False)
        self.pipeline.set_attribute('read', 'PUPIL', (90., 90., 90., 90.), static=False)

        date = ('2012-12-01T07:09:00.0000', '2012-12-01T07:09:01.0000',
                '2012-12-01T07:09:02.0000', '2012-12-01T07:09:03.0000')

        self.pipeline.set_attribute('read', 'DATE', date, static=False)

        module = AngleCalculationModule(instrument='NACO',
                                        name_in='angle2',
                                        data_tag='read')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle2')

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.sum(data) == pytest.approx(-550.2338300130718, rel=self.limit, abs=0.)
        assert data.shape == (10, )

        # SPHERE expects RA/DEC in a different (HHMMSS/DDMMSS-like) format.
        self.pipeline.set_attribute('read', 'RA', (60000.0, 60000.0, 60000.0, 60000.0), static=False)

        self.pipeline.set_attribute('read', 'DEC', (-510000., -510000., -510000., -510000.), static=False)

        module = AngleCalculationModule(instrument='SPHERE/IRDIS',
                                        name_in='angle3',
                                        data_tag='read')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('angle3')

        warning_0 = 'For SPHERE data it is recommended to use the header keyword \'ESO INS4 ' \
                    'DROT2 RA\' to specify the object\'s right ascension. The input will be ' \
                    'parsed accordingly. Using the regular \'RA\' keyword will lead to wrong ' \
                    'parallactic angles.'

        warning_1 = 'For SPHERE data it is recommended to use the header keyword \'ESO INS4 ' \
                    'DROT2 DEC\' to specify the object\'s declination. The input will be parsed ' \
                    'accordingly. Using the regular \'DEC\' keyword will lead to wrong ' \
                    'parallactic angles.'

        # NOTE(review): the warning count is only checked conditionally —
        # presumably because extra warnings can appear on some platforms.
        if len(warning) == 2:
            assert warning[0].message.args[0] == warning_0
            assert warning[1].message.args[0] == warning_1

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.sum(data) == pytest.approx(1704.220236104952, rel=self.limit, abs=0.)
        assert data.shape == (10, )

        module = AngleCalculationModule(instrument='SPHERE/IFS',
                                        name_in='angle4',
                                        data_tag='read')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('angle4')

        warning_0 = 'AngleCalculationModule has not been tested for SPHERE/IFS data.'

        warning_1 = 'For SPHERE data it is recommended to use the header keyword \'ESO INS4 ' \
                    'DROT2 RA\' to specify the object\'s right ascension. The input will be ' \
                    'parsed accordingly. Using the regular \'RA\' keyword will lead to wrong ' \
                    'parallactic angles.'

        warning_2 = 'For SPHERE data it is recommended to use the header keyword \'ESO INS4 ' \
                    'DROT2 DEC\' to specify the object\'s declination. The input will be parsed ' \
                    'accordingly. Using the regular \'DEC\' keyword will lead to wrong ' \
                    'parallactic angles.'

        if len(warning) == 3:
            assert warning[0].message.args[0] == warning_0
            assert warning[1].message.args[0] == warning_1
            assert warning[2].message.args[0] == warning_2

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.sum(data) == pytest.approx(-890.8506520762833, rel=self.limit, abs=0.)
        assert data.shape == (10, )

    def test_angle_sort(self) -> None:
        """Sort the images by parallactic angle after reversing the INDEX attribute."""

        index = self.pipeline.get_data('header_read/INDEX')
        self.pipeline.set_attribute('read', 'INDEX', index[::-1], static=False)

        module = SortParangModule(name_in='sort1',
                                  image_in_tag='read',
                                  image_out_tag='read_sorted')

        self.pipeline.add_module(module)
        self.pipeline.run_module('sort1')

        # Restore the original index order for the tests that follow.
        self.pipeline.set_attribute('read', 'INDEX', index, static=False)

        parang = self.pipeline.get_data('header_read/PARANG')[::-1]
        parang_sort = self.pipeline.get_data('header_read_sorted/PARANG')
        assert np.sum(parang) == pytest.approx(np.sum(parang_sort), rel=self.limit, abs=0.)

        # Seed the IFS parallactic angles used by test_angle_sort_ifs.
        parang_set = [0., 1., 2., 3., 4., 5., 6., 7., 8., 9.]
        self.pipeline.set_attribute('read_ifs', 'PARANG', parang_set, static=False)

        data = self.pipeline.get_data('read_sorted')
        assert np.sum(data[0]) == pytest.approx(9.71156815235485, rel=self.limit, abs=0.)

    def test_angle_sort_ifs(self) -> None:
        """Same parallactic-angle sort applied to the 4D IFS data set."""

        index = self.pipeline.get_data('header_read_ifs/INDEX')
        self.pipeline.set_attribute('read_ifs', 'INDEX', index[::-1], static=False)

        module = SortParangModule(name_in='sort2',
                                  image_in_tag='read_ifs',
                                  image_out_tag='read_ifs_sorted')

        self.pipeline.add_module(module)
        self.pipeline.run_module('sort2')

        # Restore the original index order.
        self.pipeline.set_attribute('read_ifs', 'INDEX', index, static=False)

        parang = self.pipeline.get_data('header_read_ifs/PARANG')[::-1]
        parang_sort = self.pipeline.get_data('header_read_ifs_sorted/PARANG')
        assert np.sum(parang) == pytest.approx(np.sum(parang_sort), rel=self.limit, abs=0.)

        data = self.pipeline.get_data('read_ifs_sorted')
        assert np.sum(data[0, 0]) == pytest.approx(21.185139976163477, rel=self.limit, abs=0.)

    def test_angle_interpolation_mismatch(self) -> None:
        """A NDIT/NFRAMES mismatch should warn but still interpolate the angles."""

        self.pipeline.set_attribute('read', 'NDIT', [9, 9, 9, 9], static=False)

        module = AngleInterpolationModule(name_in='angle5',
                                          data_tag='read')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('angle5')

        warning_0 = 'There is a mismatch between the NDIT and NFRAMES values. The parallactic ' \
                    'angles are calculated with a linear interpolation by using NFRAMES steps. ' \
                    'A frame selection should be applied after the parallactic angles are ' \
                    'calculated.'

        if len(warning) == 1:
            assert warning[0].message.args[0] == warning_0

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.sum(data) == pytest.approx(900., rel=self.limit, abs=0.)
        assert data.shape == (10, )

    def test_psf_preparation_norm_mask(self) -> None:
        """Normalize the images and apply central and edge masks."""

        module = PSFpreparationModule(name_in='prep1',
                                      image_in_tag='read',
                                      image_out_tag='prep1',
                                      mask_out_tag='mask1',
                                      norm=True,
                                      cent_size=0.1,
                                      edge_size=1.0)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep1')

        data = self.pipeline.get_data('prep1')
        assert np.sum(data) == pytest.approx(-1.5844830188044685, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        data = self.pipeline.get_data('mask1')
        assert np.sum(data) == pytest.approx(52, rel=self.limit, abs=0.)
        assert data.shape == (11, 11)

    def test_psf_preparation_none(self) -> None:
        """Without normalization or masking the data should pass through unchanged."""

        module = PSFpreparationModule(name_in='prep2',
                                      image_in_tag='read',
                                      image_out_tag='prep2',
                                      mask_out_tag='mask2',
                                      norm=False,
                                      cent_size=None,
                                      edge_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep2')

        data = self.pipeline.get_data('prep2')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_psf_preparation_no_mask_out(self) -> None:
        """Pass-through preparation with the mask output port disabled."""

        module = PSFpreparationModule(name_in='prep3',
                                      image_in_tag='read',
                                      image_out_tag='prep3',
                                      mask_out_tag=None,
                                      norm=False,
                                      cent_size=None,
                                      edge_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep3')

        data = self.pipeline.get_data('prep3')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_psf_preparation_sdi(self) -> None:
        """Pass-through preparation of the 4D IFS data set."""

        module = PSFpreparationModule(name_in='prep4',
                                      image_in_tag='read_ifs',
                                      image_out_tag='prep4',
                                      mask_out_tag=None,
                                      norm=False,
                                      cent_size=None,
                                      edge_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep4')

        data = self.pipeline.get_data('prep4')
        assert np.sum(data) == pytest.approx(749.8396528807369, rel=self.limit, abs=0.)
        assert data.shape == (3, 10, 21, 21)

    def test_sdi_preparation(self) -> None:
        """Rescale the images for SDI and check the stored history attribute."""

        module = SDIpreparationModule(name_in='sdi',
                                      wavelength=(0.65, 0.6),
                                      width=(0.1, 0.5),
                                      image_in_tag='read',
                                      image_out_tag='sdi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('sdi')

        data = self.pipeline.get_data('sdi')
        assert np.sum(data) == pytest.approx(21.084666133914183, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

        attribute = self.pipeline.get_attribute('sdi', 'History: SDIpreparationModule')
        assert attribute == '(line, continuum) = (0.65, 0.6)'
class TestPsfSubtractionSdi:
    """Tests of PcaPsfSubtractionModule on a 4D IFS dataset for the
    SDI-capable processing types, single-process and multiprocessing."""

    def setup_class(self) -> None:
        # NOTE(review): looser tolerance than the ADI tests below — presumably
        # the SDI results are less reproducible across platforms; confirm.
        self.limit = 1e-5
        self.test_dir = os.path.dirname(__file__) + '/'

        create_ifs_data(self.test_dir + 'science')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['science'])

    def test_read_data(self) -> None:
        """Read the IFS cube and attach wavelength and parang attributes."""

        module = FitsReadingModule(name_in='read',
                                   image_tag='science',
                                   input_dir=self.test_dir + 'science',
                                   ifs_data=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('science')
        assert np.sum(data) == pytest.approx(749.8396528807368, rel=self.limit, abs=0.)
        assert data.shape == (3, 10, 21, 21)

        self.pipeline.set_attribute('science', 'WAVELENGTH', [1., 1.1, 1.2], static=False)
        self.pipeline.set_attribute('science', 'PARANG', np.linspace(0., 180., 10), static=False)

    def test_psf_subtraction_sdi(self) -> None:
        """Run each processing type once and check the residual outputs."""

        processing_types = ['ADI', 'SDI+ADI', 'ADI+SDI', 'CODI']

        # One row per processing type: expected sums of the mean, median,
        # weighted, and clipped residuals.
        expected = [[-0.16718942968552664, -0.790697125718532,
                     19.507979777136892, -0.21617058715490922],
                    [-0.001347198747121658, -0.08621264803633322,
                     2.3073192270025333, -0.010269745733878437],
                    [0.009450917836998779, -0.05776205365084376,
                     -0.43506678222476264, 0.0058856438951644455],
                    [-0.2428739554898396, -0.5069023645693083,
                     9.326414176548905, 0.00]]

        shape_expc = [(2, 3, 21, 21), (2, 2, 3, 21, 21), (1, 1, 3, 21, 21), (2, 3, 21, 21)]

        pca_numbers = [range(1, 3), (range(1, 3), range(1, 3)), ([1], [1]), range(1, 3)]

        res_arr_tags = [None, None, 'res_arr_single_sdi_ADI+SDI', None]

        for i, p_type in enumerate(processing_types):
            module = PcaPsfSubtractionModule(pca_numbers=pca_numbers[i],
                                             name_in='pca_single_sdi_' + p_type,
                                             images_in_tag='science',
                                             reference_in_tag='science',
                                             res_mean_tag='res_mean_single_sdi_' + p_type,
                                             res_median_tag='res_median_single_sdi_' + p_type,
                                             res_weighted_tag='res_weighted_single_sdi_' + p_type,
                                             res_rot_mean_clip_tag='res_clip_single_sdi_' + p_type,
                                             res_arr_out_tag=res_arr_tags[i],
                                             basis_out_tag='basis_single_sdi_' + p_type,
                                             extra_rot=0.,
                                             subtract_mean=True,
                                             processing_type=p_type)

            self.pipeline.add_module(module)
            self.pipeline.run_module('pca_single_sdi_' + p_type)

            data = self.pipeline.get_data('res_mean_single_sdi_' + p_type)
            assert np.sum(data) == pytest.approx(expected[i][0], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_median_single_sdi_' + p_type)
            assert np.sum(data) == pytest.approx(expected[i][1], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_weighted_single_sdi_' + p_type)
            assert np.sum(data) == pytest.approx(expected[i][2], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_clip_single_sdi_' + p_type)
            # assert np.sum(data) == pytest.approx(expected[i][3], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            # data = self.pipeline.get_data('basis_single_sdi_'+p_type)
            # assert np.sum(data) == pytest.approx(-1.3886119555248766, rel=self.limit, abs=0.)
            # assert data.shape == (5, 30, 30)

    def test_psf_subtraction_sdi_multi(self) -> None:
        """Same checks with CPU = 4 to exercise the multiprocessing path."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        processing_types = ['SDI', 'ADI+SDI']

        pca_numbers = [range(1, 3), (range(1, 3), range(1, 3))]

        expected = [[-0.004159475403024583, 0.02613693149969979,
                     -0.12940723035023394, -0.008432530081399985],
                    [-0.006580571531064533, -0.08171546066331437,
                     0.5700432018961117, -0.014527353460544753]]

        shape_expc = [(2, 3, 21, 21), (2, 2, 3, 21, 21)]

        for i, p_type in enumerate(processing_types):
            module = PcaPsfSubtractionModule(pca_numbers=pca_numbers[i],
                                             name_in='pca_multi_sdi_' + p_type,
                                             images_in_tag='science',
                                             reference_in_tag='science',
                                             res_mean_tag='res_mean_multi_sdi_' + p_type,
                                             res_median_tag='res_median_multi_sdi_' + p_type,
                                             res_weighted_tag='res_weighted_multi_sdi_' + p_type,
                                             res_rot_mean_clip_tag='res_clip_multi_sdi_' + p_type,
                                             res_arr_out_tag=None,
                                             basis_out_tag=None,
                                             extra_rot=0.,
                                             subtract_mean=True,
                                             processing_type=p_type)

            self.pipeline.add_module(module)
            self.pipeline.run_module('pca_multi_sdi_' + p_type)

            data = self.pipeline.get_data('res_mean_multi_sdi_' + p_type)
            assert np.sum(data) == pytest.approx(expected[i][0], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_median_multi_sdi_' + p_type)
            assert np.sum(data) == pytest.approx(expected[i][1], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_weighted_multi_sdi_' + p_type)
            assert np.sum(data) == pytest.approx(expected[i][2], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]

            data = self.pipeline.get_data('res_clip_multi_sdi_' + p_type)
            # assert np.sum(data) == pytest.approx(expected[i][3], rel=self.limit, abs=0.)
            assert data.shape == shape_expc[i]
class TestPsfSubtractionAdi:
    """Tests of classical ADI and PCA-based PSF subtraction on 3D data."""

    def setup_class(self) -> None:
        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_fake_data(self.test_dir + 'science')
        create_fake_data(self.test_dir + 'reference')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['science', 'reference'])

    def test_read_data(self) -> None:
        """Read science and reference cubes and set the parallactic angles."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='science',
                                   input_dir=self.test_dir + 'science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('science')
        assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        self.pipeline.set_attribute('science', 'PARANG', np.linspace(0., 180., 10), static=False)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='reference',
                                   input_dir=self.test_dir + 'reference')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('reference')
        assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_psf_preparation(self) -> None:
        """Mask science and reference images; masked copies are used below."""

        module = PSFpreparationModule(name_in='prep1',
                                      image_in_tag='science',
                                      image_out_tag='science_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.05,
                                      edge_size=1.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep1')

        data = self.pipeline.get_data('science_prep')
        assert np.sum(data) == pytest.approx(5.029285028467547, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        module = PSFpreparationModule(name_in='prep2',
                                      image_in_tag='reference',
                                      image_out_tag='reference_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.05,
                                      edge_size=1.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep2')

        data = self.pipeline.get_data('reference_prep')
        assert np.sum(data) == pytest.approx(5.029285028467547, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_classical_adi(self) -> None:
        """Classical ADI with all frames as reference, mean residuals."""

        module = ClassicalADIModule(threshold=None,
                                    nreference=None,
                                    residuals='mean',
                                    extra_rot=0.,
                                    name_in='cadi1',
                                    image_in_tag='science_prep',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi1')

        data = self.pipeline.get_data('cadi_res')
        assert np.sum(data) == pytest.approx(0.8381625719865213, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        data = self.pipeline.get_data('cadi_stack')
        assert np.sum(data) == pytest.approx(0.08395606034388256, rel=self.limit, abs=0.)
        assert data.shape == (1, 21, 21)

    def test_classical_adi_threshold(self) -> None:
        """Classical ADI with a rotation threshold and median residuals."""

        module = ClassicalADIModule(threshold=(0.1, 0.03, 1.),
                                    nreference=5,
                                    residuals='median',
                                    extra_rot=0.,
                                    name_in='cadi2',
                                    image_in_tag='science_prep',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi2')

        data = self.pipeline.get_data('cadi_res')
        assert np.sum(data) == pytest.approx(0.7158207863548083, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        data = self.pipeline.get_data('cadi_stack')
        assert np.sum(data) == pytest.approx(0.07448334552227256, rel=self.limit, abs=0.)
        assert data.shape == (1, 21, 21)

    def test_psf_subtraction_pca_single(self) -> None:
        """Single-process PCA subtraction with all residual outputs enabled."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_single',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_single',
                                         res_median_tag='res_median_single',
                                         res_weighted_tag='res_weighted_single',
                                         res_rot_mean_clip_tag='res_clip_single',
                                         res_arr_out_tag='res_arr_single',
                                         basis_out_tag='basis_single',
                                         extra_rot=45.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_single')

        data = self.pipeline.get_data('res_mean_single')
        assert np.sum(data) == pytest.approx(-0.00011857022709778602, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_median_single')
        assert np.sum(data) == pytest.approx(-0.002184868916566093, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_weighted_single')
        assert np.sum(data) == pytest.approx(0.08102176735226937, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        # data = self.pipeline.get_data('res_clip_single')
        # assert np.sum(data) == pytest.approx(7.09495495339349e-05, rel=self.limit, abs=0.)
        # assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_arr_single1')
        assert np.sum(data) == pytest.approx(-0.0002751385418691618, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        data = self.pipeline.get_data('basis_single')
        assert np.sum(data) == pytest.approx(0.09438697731322143, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_no_mean(self) -> None:
        """PCA subtraction without subtracting the mean image first."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean')

        data = self.pipeline.get_data('res_mean_no_mean')
        assert np.sum(data) == pytest.approx(0.0006081272007585688, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_no_mean')
        assert np.sum(data) == pytest.approx(5.118005177367776, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_ref(self) -> None:
        """PCA subtraction with a separate reference dataset."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_ref',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref')

        data = self.pipeline.get_data('res_mean_ref')
        assert np.sum(data) == pytest.approx(0.0006330226118859073, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_ref')
        assert np.sum(data) == pytest.approx(0.0943869773132221, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_ref_no_mean(self) -> None:
        """Reference dataset combined with no mean subtraction."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_ref_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean')

        data = self.pipeline.get_data('res_mean_ref_no_mean')
        assert np.sum(data) == pytest.approx(0.0006081272007585764, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_ref_no_mean')
        assert np.sum(data) == pytest.approx(5.118005177367774, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_pca_single_mask(self) -> None:
        """Single-process PCA subtraction on the masked (prepared) images."""

        pca = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                      name_in='pca_single_mask',
                                      images_in_tag='science_prep',
                                      reference_in_tag='science_prep',
                                      res_mean_tag='res_mean_single_mask',
                                      res_median_tag='res_median_single_mask',
                                      res_weighted_tag='res_weighted_single_mask',
                                      res_rot_mean_clip_tag='res_clip_single_mask',
                                      res_arr_out_tag='res_arr_single_mask',
                                      basis_out_tag='basis_single_mask',
                                      extra_rot=45.,
                                      subtract_mean=True)

        self.pipeline.add_module(pca)
        self.pipeline.run_module('pca_single_mask')

        data = self.pipeline.get_data('res_mean_single_mask')
        assert np.sum(data) == pytest.approx(0.00010696166038626307, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_median_single_mask')
        assert np.sum(data) == pytest.approx(-0.0021005307611346156, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_weighted_single_mask')
        assert np.sum(data) == pytest.approx(0.06014309988789256, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_clip_single_mask')
        # assert np.sum(data) == pytest.approx(9.35120662148806e-05, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('res_arr_single_mask1')
        assert np.sum(data) == pytest.approx(0.0006170872862547557, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        data = self.pipeline.get_data('basis_single_mask')
        assert np.sum(data) == pytest.approx(0.08411251293842359, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_no_mean_mask(self) -> None:
        """Masked images without mean subtraction."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_no_mean_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_no_mean_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean_mask',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean_mask')

        data = self.pipeline.get_data('res_mean_no_mean_mask')
        assert np.sum(data) == pytest.approx(2.3542359949502915e-05, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_no_mean_mask')
        assert np.sum(data) == pytest.approx(5.655460951633232, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_ref_mask(self) -> None:
        """Masked images with a separate masked reference dataset."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_ref_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='reference_prep',
                                         res_mean_tag='res_mean_ref_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_mask',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_mask')

        data = self.pipeline.get_data('res_mean_ref_mask')
        assert np.sum(data) == pytest.approx(9.400558926815758e-06, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_ref_mask')
        assert np.sum(data) == pytest.approx(0.08411251293842326, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_ref_no_mean_mask(self) -> None:
        """Masked reference dataset without mean subtraction."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_ref_no_mean_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='reference_prep',
                                         res_mean_tag='res_mean_ref_no_mean_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_no_mean_mask',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean_mask')

        data = self.pipeline.get_data('res_mean_ref_no_mean_mask')
        assert np.sum(data) == pytest.approx(2.354235994950671e-05, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

        data = self.pipeline.get_data('basis_ref_no_mean_mask')
        assert np.sum(data) == pytest.approx(5.655460951633233, rel=self.limit, abs=0.)
        assert data.shape == (2, 21, 21)

    def test_psf_subtraction_pca_multi(self) -> None:
        """Multiprocessing PCA run must reproduce the single-process results."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 3),
                                         name_in='pca_multi',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_multi',
                                         res_median_tag='res_median_multi',
                                         res_weighted_tag='res_weighted_multi',
                                         res_rot_mean_clip_tag='res_clip_multi',
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_multi',
                                         extra_rot=45.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi')

        data_single = self.pipeline.get_data('res_mean_single')
        data_multi = self.pipeline.get_data('res_mean_multi')
        assert data_single.shape == data_multi.shape
        assert data_single[data_single > 1e-12] == \
            pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.)
data_single = self.pipeline.get_data('res_median_single') data_multi = self.pipeline.get_data('res_median_multi') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('res_weighted_single') data_multi = self.pipeline.get_data('res_weighted_multi') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('basis_single') data_multi = self.pipeline.get_data('basis_multi') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) def test_psf_subtraction_pca_multi_mask(self) -> None: database = h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') database['config'].attrs['CPU'] = 4 module = PcaPsfSubtractionModule( pca_numbers=range(1, 3), name_in='pca_multi_mask', images_in_tag='science_prep', reference_in_tag='science_prep', res_mean_tag='res_mean_multi_mask', res_median_tag='res_median_multi_mask', res_weighted_tag='res_weighted_multi_mask', res_rot_mean_clip_tag='res_clip_multi_mask', res_arr_out_tag=None, basis_out_tag='basis_multi_mask', extra_rot=45., subtract_mean=True) self.pipeline.add_module(module) self.pipeline.run_module('pca_multi_mask') data_single = self.pipeline.get_data('res_mean_single_mask') data_multi = self.pipeline.get_data('res_mean_multi_mask') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) 
data_single = self.pipeline.get_data('res_median_single_mask') data_multi = self.pipeline.get_data('res_median_multi_mask') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('res_weighted_single_mask') data_multi = self.pipeline.get_data('res_weighted_multi_mask') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('basis_single_mask') data_multi = self.pipeline.get_data('basis_multi_mask') assert data_single.shape == data_multi.shape assert data_single == pytest.approx(data_multi, rel=self.limit, abs=0.) def test_psf_subtraction_len_parang(self) -> None: database = h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') database['config'].attrs['CPU'] = 1 parang = self.pipeline.get_data('header_science/PARANG') self.pipeline.set_attribute('science_prep', 'PARANG', np.append(parang, 0.), static=False) module = PcaPsfSubtractionModule(pca_numbers=[ 1, ], name_in='pca_len_parang', images_in_tag='science_prep', reference_in_tag='science_prep', res_mean_tag='res_mean_len_parang', extra_rot=0.) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('pca_len_parang') assert str(error.value) == 'The number of images (10) is not equal to the number of ' \ 'parallactic angles (11).'
class TestFitsWritingModule:
    """Tests of FitsWritingModule: exporting database tags to FITS files."""

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'fits')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        # test000-test003 are produced by the subset_size tests below.
        files = ['test.fits', 'test000.fits', 'test001.fits', 'test002.fits', 'test003.fits']

        remove_test_data(self.test_dir, folders=['fits'], files=files)

    def test_fits_reading(self) -> None:
        """Read the input images that the writing tests operate on."""

        module = FitsReadingModule(name_in='read',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('images')
        # NOTE(review): `limit` is presumably a module-level tolerance constant
        # (other classes set self.limit instead) — confirm it exists at file scope.
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_fits_writing(self) -> None:
        """Write the full dataset to a new FITS file."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write1',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write1')

    def test_filename_extension(self) -> None:
        """A file name without the .fits extension must raise a ValueError."""

        with pytest.raises(ValueError) as error:
            FitsWritingModule(file_name='test.dat',
                              name_in='write3',
                              output_dir=None,
                              data_tag='images',
                              data_range=None,
                              overwrite=True,
                              subset_size=None)

        assert str(error.value) == 'Output \'file_name\' requires the FITS extension.'

    def test_data_range(self) -> None:
        """Write only a slice of the dataset with the data_range argument."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write4',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=(0, 10),
                                   overwrite=True,
                                   subset_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write4')

    def test_not_overwritten(self) -> None:
        """With overwrite=False an existing file triggers a warning."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write5',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=False,
                                   subset_size=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('write5')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Filename already present. Use overwrite=True ' \
                                             'to overwrite an existing FITS file.'

    def test_subset_size(self) -> None:
        """Split the output into numbered files of subset_size images each."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write6',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=True,
                                   subset_size=10)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write6')

    def test_subset_size_data_range(self) -> None:
        """Combine subset_size with a data_range selection."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write7',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=(8, 18),
                                   overwrite=True,
                                   subset_size=10)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write7')

    def test_attribute_length(self) -> None:
        """Overlong HIERARCH keyword values are truncated with a warning."""

        text = 'long_text_long_text_long_text_long_text_long_text_long_text_long_text_long_text'

        self.pipeline.set_attribute('images', 'short', 'value', static=True)
        self.pipeline.set_attribute('images', 'longer_than_eight1', 'value', static=True)
        self.pipeline.set_attribute('images', 'longer_than_eight2', text, static=True)

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write8',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=True,
                                   subset_size=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('write8')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Key \'hierarch longer_than_eight2\' with value ' \
                                             '\'long_text_long_text_long_text_long_text_long_' \
                                             'text_long_text_long_text_long_text\' is too ' \
                                             'long for the FITS format. To avoid an error, ' \
                                             'the value was truncated to \'long_text_long_text' \
                                             '_long_text_long_text_long_tex\'.'
class TestFluxPosition:
    """Tests of the flux-and-position modules: aperture photometry, fake
    planet injection, SNR/FPF, simplex minimization, and MCMC sampling."""

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'flux', npix_x=101, npix_y=101)

        create_star_data(path=self.test_dir + 'psf',
                         npix_x=15,
                         npix_y=15,
                         x0=[7., 7., 7., 7.],
                         y0=[7., 7., 7., 7.],
                         ndit=1,
                         nframes=1,
                         noise=False)

        create_fake(path=self.test_dir + 'adi',
                    ndit=[5, 5, 5, 5],
                    nframes=[5, 5, 5, 5],
                    exp_no=[1, 2, 3, 4],
                    npix=(15, 15),
                    fwhm=3.,
                    x0=[7., 7., 7., 7.],
                    y0=[7., 7., 7., 7.],
                    angles=[[0., 50.], [50., 100.], [100., 150.], [150., 200.]],
                    sep=5.5,
                    contrast=1.)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['flux', 'adi', 'psf'])

    def test_read_data(self) -> None:
        """Read the stellar, ADI, and PSF datasets used by the tests below."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='read',
                                   input_dir=self.test_dir + 'flux')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('read')
        # NOTE(review): `limit` is presumably a module-level tolerance constant
        # (other classes set self.limit instead) — confirm it exists at file scope.
        assert np.allclose(data[0, 50, 50], 0.0986064357966972, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.827812356946396e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 101, 101)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='adi',
                                   input_dir=self.test_dir + 'adi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('adi')
        assert np.allclose(data[0, 7, 7], 0.09823888178122618, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.008761678820997612, rtol=limit, atol=0.)
        assert data.shape == (20, 15, 15)

        module = FitsReadingModule(name_in='read3',
                                   image_tag='psf',
                                   input_dir=self.test_dir + 'psf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read3')

        data = self.pipeline.get_data('psf')
        assert np.allclose(data[0, 7, 7], 0.09806026673451182, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444444429123135, rtol=limit, atol=0.)
        assert data.shape == (4, 15, 15)

    def test_aperture_photometry(self) -> None:
        """Aperture photometry with 1 and 4 CPUs should give the same shape."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = AperturePhotometryModule(radius=0.1,
                                          position=None,
                                          name_in='photometry',
                                          image_in_tag='read',
                                          phot_out_tag='photometry')

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry')

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = AperturePhotometryModule(radius=0.1,
                                          position=None,
                                          name_in='photometry_multi',
                                          image_in_tag='read',
                                          phot_out_tag='photometry_multi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry_multi')

        data = self.pipeline.get_data('photometry')
        assert np.allclose(data[0][0], 0.9853286992326858, rtol=limit, atol=0.)
        assert np.allclose(data[39][0], 0.9835251375574492, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.9836439188900222, rtol=limit, atol=0.)
        assert data.shape == (40, 1)

        data_multi = self.pipeline.get_data('photometry_multi')
        assert data.shape == data_multi.shape

        # Outputs zeros sometimes for data_multi on Travis CI
        # for i, item in enumerate(data_multi):
        #     assert np.allclose(data[i], item, rtol=1e-6, atol=0.)

    def test_angle_interpolation(self) -> None:
        """Interpolate parallactic angles from the FITS header attributes."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = AngleInterpolationModule(name_in='angle', data_tag='read')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        data = self.pipeline.get_data('header_read/PARANG')
        assert data[5] == 2.7777777777777777
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_fake_planet(self) -> None:
        """Inject an artificial planet at 0.5 arcsec, PA 90 deg, 6 mag contrast."""

        module = FakePlanetModule(position=(0.5, 90.),
                                  magnitude=6.,
                                  psf_scaling=1.,
                                  interpolation='spline',
                                  name_in='fake',
                                  image_in_tag='read',
                                  psf_in_tag='read',
                                  image_out_tag='fake')

        self.pipeline.add_module(module)
        self.pipeline.run_module('fake')

        data = self.pipeline.get_data('fake')
        assert np.allclose(data[0, 50, 50], 0.09860622347589054, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.867026482551375e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 101, 101)

    def test_psf_subtraction(self) -> None:
        """PCA subtraction of the dataset with the injected planet."""

        module = PcaPsfSubtractionModule(pca_numbers=(2, ),
                                         name_in='pca',
                                         images_in_tag='fake',
                                         reference_in_tag='fake',
                                         res_mean_tag='res_mean',
                                         res_median_tag=None,
                                         res_arr_out_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         extra_rot=0.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca')

        data = self.pipeline.get_data('res_mean')
        assert np.allclose(data[0, 49, 31], 4.8963214463463886e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.8409659677297164e-08, rtol=limit, atol=0.)
        assert data.shape == (1, 101, 101)

    def test_false_positive(self) -> None:
        """SNR and false-positive fraction at the injected planet position."""

        module = FalsePositiveModule(position=(31., 49.),
                                     aperture=0.1,
                                     ignore=True,
                                     name_in='false',
                                     image_in_tag='res_mean',
                                     snr_out_tag='snr_fpf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('false')

        # Columns: x, y, separation, position angle, SNR, FPF.
        data = self.pipeline.get_data('snr_fpf')
        assert np.allclose(data[0, 0], 31.0, rtol=limit, atol=0.)
        assert np.allclose(data[0, 1], 49.0, rtol=limit, atol=0.)
        assert np.allclose(data[0, 2], 0.513710034941892, rtol=limit, atol=0.)
        assert np.allclose(data[0, 3], 93.01278750418334, rtol=limit, atol=0.)
        assert np.allclose(data[0, 4], 7.333740467578795, rtol=limit, atol=0.)
        assert np.allclose(data[0, 5], 4.5257622875993775e-06, rtol=limit, atol=0.)

    def test_simplex_minimization(self) -> None:
        """Retrieve the planet position and contrast by simplex minimization."""

        module = SimplexMinimizationModule(position=(31., 49.),
                                           magnitude=6.,
                                           psf_scaling=-1.,
                                           name_in='simplex',
                                           image_in_tag='fake',
                                           psf_in_tag='read',
                                           res_out_tag='simplex_res',
                                           flux_position_tag='flux_position',
                                           merit='hessian',
                                           aperture=0.1,
                                           sigma=0.,
                                           tolerance=0.1,
                                           pca_number=1,
                                           cent_size=0.1,
                                           edge_size=None,
                                           extra_rot=0.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('simplex')

        data = self.pipeline.get_data('simplex_res')
        assert np.allclose(data[0, 50, 31], 0.00012976212788352575, rtol=limit, atol=0.)
        assert np.allclose(data[42, 50, 31], 1.2141761821389107e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.461337432531517e-09, rtol=limit, atol=0.)
        assert data.shape == (43, 101, 101)

        data = self.pipeline.get_data('flux_position')
        assert np.allclose(data[42, 0], 31.6456737445356, rtol=limit, atol=0.)
        assert np.allclose(data[42, 1], 49.9199601480223, rtol=limit, atol=0.)
        assert np.allclose(data[42, 2], 0.49557152090327206, rtol=limit, atol=0.)
        assert np.allclose(data[42, 3], 90.24985480686087, rtol=limit, atol=0.)
        assert np.allclose(data[42, 4], 5.683191873535635, rtol=limit, atol=0.)
        assert data.shape == (43, 6)

    def test_mcmc_sampling_gaussian(self) -> None:
        """MCMC sampling of the planet parameters with a Gaussian variance."""

        self.pipeline.set_attribute('adi', 'PARANG', np.arange(0., 200., 10.), static=False)

        # Scale up flux so the signal dominates the noise in the sampling.
        module = ScaleImagesModule(scaling=(None, None, 100.),
                                   pixscale=False,
                                   name_in='scale1',
                                   image_in_tag='adi',
                                   image_out_tag='adi_scale')

        self.pipeline.add_module(module)
        self.pipeline.run_module('scale1')

        data = self.pipeline.get_data('adi_scale')
        assert np.allclose(data[0, 7, 7], 9.82388817812263, rtol=limit, atol=0.)
        assert data.shape == (20, 15, 15)

        module = ScaleImagesModule(scaling=(None, None, 100.),
                                   pixscale=False,
                                   name_in='scale2',
                                   image_in_tag='psf',
                                   image_out_tag='psf_scale')

        self.pipeline.add_module(module)
        self.pipeline.run_module('scale2')

        data = self.pipeline.get_data('psf_scale')
        assert np.allclose(data[0, 7, 7], 9.806026673451198, rtol=limit, atol=0.)
        assert data.shape == (4, 15, 15)

        module = DerotateAndStackModule(name_in='take_psf_avg',
                                        image_in_tag='psf_scale',
                                        image_out_tag='psf_avg',
                                        derotate=False,
                                        stack='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('take_psf_avg')

        data = self.pipeline.get_data('psf_avg')
        assert data.shape == (1, 15, 15)

        module = MCMCsamplingModule(param=(0.1485, 0., 0.),
                                    bounds=((0.1, 0.25), (-5., 5.), (-0.5, 0.5)),
                                    name_in='mcmc',
                                    image_in_tag='adi_scale',
                                    psf_in_tag='psf_avg',
                                    chain_out_tag='mcmc',
                                    nwalkers=50,
                                    nsteps=150,
                                    psf_scaling=-1.,
                                    pca_number=1,
                                    aperture={'type': 'circular',
                                              'pos_x': 7.0,
                                              'pos_y': 12.5,
                                              'radius': 0.1},
                                    mask=None,
                                    extra_rot=0.,
                                    scale=2.,
                                    sigma=(1e-3, 1e-1, 1e-2),
                                    prior='flat',
                                    variance='gaussian')

        self.pipeline.add_module(module)

        with pytest.warns(FutureWarning) as warning:
            self.pipeline.run_module('mcmc')

        assert warning[0].message.args[0] == 'Using a non-tuple sequence for multidimensional ' \
                                             'indexing is deprecated; use `arr[tuple(seq)]` ' \
                                             'instead of `arr[seq]`. In the future this will be ' \
                                             'interpreted as an array index, ' \
                                             '`arr[np.array(seq)]`, which will result either ' \
                                             'in an error or a different result.'
single = self.pipeline.get_data('mcmc') single = single[:, 20:, :].reshape((-1, 3)) assert np.allclose(np.median(single[:, 0]), 0.148, rtol=0., atol=0.01) assert np.allclose(np.median(single[:, 1]), 0., rtol=0., atol=0.2) assert np.allclose(np.median(single[:, 2]), 0., rtol=0., atol=0.1) def test_mcmc_sampling_poisson(self): module = MCMCsamplingModule(param=(0.1485, 0., 0.), bounds=((0.1, 0.25), (-5., 5.), (-0.5, 0.5)), name_in='mcmc_prior', image_in_tag='adi_scale', psf_in_tag='psf_avg', chain_out_tag='mcmc_prior', nwalkers=50, nsteps=150, psf_scaling=-1., pca_number=1, aperture={ 'type': 'elliptical', 'pos_x': 7.0, 'pos_y': 12.5, 'semimajor': 0.1, 'semiminor': 0.1, 'angle': 0.0 }, mask=None, extra_rot=0., scale=2., sigma=(1e-3, 1e-1, 1e-2), prior='aperture', variance='poisson') self.pipeline.add_module(module) with pytest.warns(FutureWarning) as warning: self.pipeline.run_module('mcmc_prior') assert warning[0].message.args[0] == 'Using a non-tuple sequence for multidimensional ' \ 'indexing is deprecated; use `arr[tuple(seq)]` ' \ 'instead of `arr[seq]`. In the future this will be ' \ 'interpreted as an array index, ' \ '`arr[np.array(seq)]`, which will result either ' \ 'in an error or a different result.' 
single = self.pipeline.get_data('mcmc_prior') single = single[:, 20:, :].reshape((-1, 3)) assert np.allclose(np.median(single[:, 0]), 0.148, rtol=0., atol=0.01) assert np.allclose(np.median(single[:, 1]), 0., rtol=0., atol=0.2) assert np.allclose(np.median(single[:, 2]), 0., rtol=0., atol=0.1) def test_mcmc_sampling_wrong_prior(self): module = MCMCsamplingModule(param=(0.1485, 0., 0.), bounds=((0.1, 0.25), (-5., 5.), (-0.5, 0.5)), name_in='mcmc_wrong_prior', image_in_tag='adi_scale', psf_in_tag='psf_avg', chain_out_tag='mcmc_prior', nwalkers=50, nsteps=150, psf_scaling=-1., pca_number=1, aperture={ 'type': 'circular', 'pos_x': 7.0, 'pos_y': 12.5, 'radius': 0.1 }, mask=None, extra_rot=0., scale=2., sigma=(1e-3, 1e-1, 1e-2), prior='test', variance='gaussian') self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('mcmc_wrong_prior') assert str(error.value) == 'Prior type not recognized.'
class TestPSFpreparation(object):
    """Tests for PSF preparation: angle interpolation/calculation, image
    normalization and masking, and SDI preparation. Tests run in order and
    share the pipeline state created in setup_class."""

    def setup_class(self):
        """Create synthetic star data and a config file, then open a pipeline."""

        self.test_dir = os.path.dirname(__file__) + "/"

        create_star_data(path=self.test_dir + "prep")
        create_config(self.test_dir + "PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the test data created by setup_class."""

        remove_test_data(self.test_dir, folders=["prep"])

    def test_read_data(self):
        """Read the FITS files into the 'read' tag used by all later tests."""

        read = FitsReadingModule(name_in="read",
                                 image_tag="read",
                                 input_dir=self.test_dir + "prep")

        self.pipeline.add_module(read)
        self.pipeline.run_module("read")

        data = self.pipeline.get_data("read")
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_angle_interpolation(self):
        """Linear interpolation of parallactic angles over the cubes."""

        angle = AngleInterpolationModule(name_in="angle1",
                                         data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle1")

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 7.777777777777778, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_angle_calculation(self):
        """Calculate parallactic angles from header attributes for NACO,
        SPHERE/IRDIS, and SPHERE/IFS (the latter emitting warnings)."""

        self.pipeline.set_attribute("read", "LATITUDE", -25.)
        self.pipeline.set_attribute("read", "LONGITUDE", -70.)
        self.pipeline.set_attribute("read", "DIT", 1.)

        self.pipeline.set_attribute("read", "RA", (90., 90., 90., 90.), static=False)
        self.pipeline.set_attribute("read", "DEC", (-51., -51., -51., -51.), static=False)
        self.pipeline.set_attribute("read", "PUPIL", (90., 90., 90., 90.), static=False)

        date = ("2012-12-01T07:09:00.0000", "2012-12-01T07:09:01.0000",
                "2012-12-01T07:09:02.0000", "2012-12-01T07:09:03.0000")
        self.pipeline.set_attribute("read", "DATE", date, static=False)

        angle = AngleCalculationModule(instrument="NACO",
                                       name_in="angle2",
                                       data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle2")

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], -55.041097524594186, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), -54.99858342139904, rtol=limit, atol=0.)
        assert data.shape == (40, )

        # SPHERE expects RA/DEC in the "ESO INS4 DROT2" header format.
        self.pipeline.set_attribute("read", "RA", (60000.0, 60000.0, 60000.0, 60000.0),
                                    static=False)

        self.pipeline.set_attribute("read", "DEC", (-510000., -510000., -510000., -510000.),
                                    static=False)

        angle = AngleCalculationModule(instrument="SPHERE/IRDIS",
                                       name_in="angle3",
                                       data_tag="read")

        self.pipeline.add_module(angle)
        self.pipeline.run_module("angle3")

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], 170.39102733657813, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 170.46341141667205, rtol=limit, atol=0.)
        assert data.shape == (40, )

        angle = AngleCalculationModule(instrument="SPHERE/IFS",
                                       name_in="angle4",
                                       data_tag="read")

        self.pipeline.add_module(angle)

        # SPHERE/IFS is untested upstream, so two warnings are expected.
        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("angle4")

        assert len(warning) == 2
        assert warning[0].message.args[0] == "AngleCalculationModule has not been tested for " \
                                             "SPHERE/IFS data."

        assert warning[1].message.args[0] == "For SPHERE data it is recommended to use the " \
                                             "header keywords \"ESO INS4 DROT2 RA/DEC\" to " \
                                             "specify the object's position. The input will be " \
                                             "parsed accordingly. Using the regular RA/DEC " \
                                             "parameters will lead to wrong parallactic angles."

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], -89.12897266342185, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), -89.02755900320116, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_angle_interpolation_mismatch(self):
        """A NDIT/NFRAMES mismatch should warn but still interpolate angles."""

        self.pipeline.set_attribute("read", "NDIT", [9, 9, 9, 9], static=False)

        angle = AngleInterpolationModule(name_in="angle5",
                                         data_tag="read")

        self.pipeline.add_module(angle)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("angle5")

        assert len(warning) == 1
        assert warning[0].message.args[0] == "There is a mismatch between the NDIT and NFRAMES " \
                                             "values. The derotation angles are calculated with " \
                                             "a linear interpolation by using NFRAMES steps. A " \
                                             "frame selection should be applied after the " \
                                             "derotation angles are calculated."

        data = self.pipeline.get_data("header_read/PARANG")
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 7.777777777777778, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_psf_preparation_norm_mask(self):
        """Normalization plus central and edge masking, with a mask output tag."""

        prep = PSFpreparationModule(name_in="prep1",
                                    image_in_tag="read",
                                    image_out_tag="prep1",
                                    mask_out_tag="mask1",
                                    norm=True,
                                    cent_size=0.1,
                                    edge_size=1.0)

        self.pipeline.add_module(prep)
        self.pipeline.run_module("prep1")

        data = self.pipeline.get_data("prep1")
        assert np.allclose(data[0, 0, 0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0., rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0001690382058762809, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        data = self.pipeline.get_data("mask1")
        assert np.allclose(data[0, 0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[99, 99], 0., rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.4268, rtol=limit, atol=0.)
        assert data.shape == (100, 100)

    def test_psf_preparation_none(self):
        """No normalization and no masking leaves the input unchanged."""

        prep = PSFpreparationModule(name_in="prep2",
                                    image_in_tag="read",
                                    image_out_tag="prep2",
                                    mask_out_tag="mask2",
                                    norm=False,
                                    cent_size=None,
                                    edge_size=None)

        self.pipeline.add_module(prep)
        self.pipeline.run_module("prep2")

        data = self.pipeline.get_data("prep2")
        assert np.allclose(data[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], -0.000287573978535779, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_psf_preparation_no_mask_out(self):
        """Same as test_psf_preparation_none but without a mask output tag."""

        prep = PSFpreparationModule(name_in="prep3",
                                    image_in_tag="read",
                                    image_out_tag="prep3",
                                    mask_out_tag=None,
                                    norm=False,
                                    cent_size=None,
                                    edge_size=None)

        self.pipeline.add_module(prep)
        self.pipeline.run_module("prep3")

        data = self.pipeline.get_data("prep3")
        assert np.allclose(data[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], -0.000287573978535779, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_sdi_preparation(self):
        """SDI preparation with a line/continuum wavelength pair; the history
        attribute records the wavelengths used."""

        sdi = SDIpreparationModule(name_in="sdi",
                                   wavelength=(0.65, 0.6),
                                   width=(0.1, 0.5),
                                   image_in_tag="read",
                                   image_out_tag="sdi")

        self.pipeline.add_module(sdi)
        self.pipeline.run_module("sdi")

        data = self.pipeline.get_data("sdi")
        assert np.allclose(data[0, 25, 25], -2.6648118007008814e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 2.0042892634995876e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        attribute = self.pipeline.get_attribute("sdi", "History: SDIpreparationModule")
        assert attribute == "(line, continuum) = (0.65, 0.6)"
class TestStackingAndSubsampling(object):
    """Tests for stacking, subsetting, mean-collapsing, derotating, and tag
    combination. Tests run in order and share the pipeline from setup_class."""

    def setup_class(self):
        """Create two identical synthetic data sets and a config file."""

        self.test_dir = os.path.dirname(__file__) + "/"

        create_star_data(path=self.test_dir+"data")
        create_star_data(path=self.test_dir+"extra")
        create_config(self.test_dir+"PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the test data created by setup_class."""

        remove_test_data(self.test_dir, folders=["data", "extra"])

    def test_read_data(self):
        """Read both data sets and check that they are identical."""

        read = FitsReadingModule(name_in="read1",
                                 image_tag="images",
                                 input_dir=self.test_dir+"data",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read1")

        data = self.pipeline.get_data("images")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        read = FitsReadingModule(name_in="read2",
                                 image_tag="extra",
                                 input_dir=self.test_dir+"extra",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read2")

        extra = self.pipeline.get_data("extra")
        assert np.allclose(data, extra, rtol=limit, atol=0.)

    def test_stack_and_subset(self):
        """Stack frames in pairs, then select a random subset of 10 stacks."""

        self.pipeline.set_attribute("images", "PARANG", np.arange(1., 41., 1.), static=False)

        stack = StackAndSubsetModule(name_in="stack",
                                     image_in_tag="images",
                                     image_out_tag="stack",
                                     random=10,
                                     stacking=2)

        self.pipeline.add_module(stack)
        self.pipeline.run_module("stack")

        data = self.pipeline.get_data("stack")
        assert np.allclose(data[0, 50, 50], 0.09816320034649725, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.983545774937238e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

        data = self.pipeline.get_data("header_stack/INDEX")
        index = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        assert np.allclose(data, index, rtol=limit, atol=0.)
        assert data.shape == (10, )

        # Mean angle of each stacked pair, for the randomly selected stacks.
        data = self.pipeline.get_data("header_stack/PARANG")
        parang = [1.5, 15.5, 19.5, 23.5, 25.5, 29.5, 31.5, 35.5, 37.5, 39.5]
        assert np.allclose(data, parang, rtol=limit, atol=0.)
        assert data.shape == (10, )

    def test_mean_cube(self):
        """Collapse each of the four cubes to its mean frame and check the
        updated INDEX/NFRAMES attributes."""

        mean = MeanCubeModule(name_in="mean",
                              image_in_tag="images",
                              image_out_tag="mean")

        self.pipeline.add_module(mean)
        self.pipeline.run_module("mean")

        data = self.pipeline.get_data("mean")
        assert np.allclose(data[0, 50, 50], 0.09805840100024205, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738069, rtol=limit, atol=0.)
        assert data.shape == (4, 100, 100)

        attribute = self.pipeline.get_attribute("mean", "INDEX", static=False)
        assert np.allclose(np.mean(attribute), 1.5, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

        attribute = self.pipeline.get_attribute("mean", "NFRAMES", static=False)
        assert np.allclose(np.mean(attribute), 1, rtol=limit, atol=0.)
        assert attribute.shape == (4, )

    def test_derotate_and_stack(self):
        """Derotate and mean-combine, then median-combine without derotation."""

        derotate = DerotateAndStackModule(name_in="derotate1",
                                          image_in_tag="images",
                                          image_out_tag="derotate1",
                                          derotate=True,
                                          stack="mean",
                                          extra_rot=10.)

        self.pipeline.add_module(derotate)
        self.pipeline.run_module("derotate1")

        data = self.pipeline.get_data("derotate1")
        assert np.allclose(data[50, 50], 0.09689679769268554, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010021671152246617, rtol=limit, atol=0.)
        assert data.shape == (100, 100)

        derotate = DerotateAndStackModule(name_in="derotate2",
                                          image_in_tag="images",
                                          image_out_tag="derotate2",
                                          derotate=False,
                                          stack="median",
                                          extra_rot=0.)

        self.pipeline.add_module(derotate)
        self.pipeline.run_module("derotate2")

        data = self.pipeline.get_data("derotate2")
        assert np.allclose(data[50, 50], 0.09809001768003645, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010033064394962, rtol=limit, atol=0.)
        assert data.shape == (100, 100)

    def test_combine_tags(self):
        """Combine the 'images' and 'extra' tags; with attribute checking a
        warning about the differing FILES keyword is expected."""

        combine = CombineTagsModule(image_in_tags=("images", "extra"),
                                    check_attr=True,
                                    index_init=False,
                                    name_in="combine1",
                                    image_out_tag="combine1")

        self.pipeline.add_module(combine)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module("combine1")

        assert len(warning) == 1
        assert warning[0].message.args[0] == "The non-static keyword FILES is already used but " \
                                             "with different values. It is advisable to only " \
                                             "combine tags that descend from the same data set."

        data = self.pipeline.get_data("combine1")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738068, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        # index_init=False keeps the original per-tag INDEX values.
        data = self.pipeline.get_data("header_combine1/INDEX")
        assert data[40] == 0
        assert data.shape == (80, )

        combine = CombineTagsModule(image_in_tags=("images", "extra"),
                                    check_attr=False,
                                    index_init=True,
                                    name_in="combine2",
                                    image_out_tag="combine2")

        self.pipeline.add_module(combine)
        self.pipeline.run_module("combine2")

        data = self.pipeline.get_data("combine1")
        extra = self.pipeline.get_data("combine2")
        assert np.allclose(data, extra, rtol=limit, atol=0.)

        # index_init=True renumbers INDEX continuously across the tags.
        data = self.pipeline.get_data("header_combine2/INDEX")
        assert data[40] == 40
        assert data.shape == (80, )
class TestPypeline:
    """Tests for core Pypeline behavior: output-port naming and
    apply_function_to_images with different MEMORY settings. Tests run in
    order — several rely on the MEMORY attribute set by an earlier test."""

    def setup_class(self):
        """Create random 3D/2D image stacks in an HDF5 database plus synthetic
        star data and a config file, then open a pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'

        # Fixed seed so the regression values below are reproducible.
        np.random.seed(1)

        image_3d = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        image_2d = np.random.normal(loc=0, scale=2e-4, size=(1, 10, 10))
        science = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        dark = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'w') as hdf_file:
            hdf_file.create_dataset('image_3d', data=image_3d)
            hdf_file.create_dataset('image_2d', data=image_2d)
            hdf_file.create_dataset('science', data=science)
            hdf_file.create_dataset('dark', data=dark)

        create_star_data(path=self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the test data created by setup_class."""

        remove_test_data(self.test_dir, folders=['images'])

    def test_output_port_name(self):
        """Re-adding an output port with an existing tag should warn, for both
        reading and processing modules."""

        read = FitsReadingModule(name_in='read',
                                 input_dir=self.test_dir + 'images',
                                 image_tag='images')

        read.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            read.add_output_port('test')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Tag \'test\' of ReadingModule \'read\' is already ' \
                                             'used.'

        process = BadPixelSigmaFilterModule(name_in='badpixel',
                                            image_in_tag='images',
                                            image_out_tag='im_out')

        process.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            process.add_output_port('test')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Tag \'test\' of ProcessingModule \'badpixel\' is ' \
                                             'already used.'

        # Adding a port against a fresh database path should not warn.
        self.pipeline.m_data_storage.close_connection()
        process._m_data_base = self.test_dir + 'database.hdf5'
        process.add_output_port('new')

    def test_apply_function_to_images_3d(self):
        """Apply a line-removal function to a 3D stack with MEMORY=1."""

        self.pipeline.set_attribute('config', 'MEMORY', 1, static=True)

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove1',
                                   image_in_tag='image_3d',
                                   image_out_tag='remove_3d')

        self.pipeline.add_module(remove)
        self.pipeline.run_module('remove1')

        data = self.pipeline.get_data('image_3d')
        assert np.allclose(np.mean(data), 1.0141852764605783e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        data = self.pipeline.get_data('remove_3d')
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_2d(self):
        """Apply the same function to a single-frame (2D) stack."""

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove2',
                                   image_in_tag='image_2d',
                                   image_out_tag='remove_2d')

        self.pipeline.add_module(remove)
        self.pipeline.run_module('remove2')

        data = self.pipeline.get_data('image_2d')
        assert np.allclose(np.mean(data), 1.2869483197883442e-05, rtol=limit, atol=0.)
        assert data.shape == (1, 10, 10)

        data = self.pipeline.get_data('remove_2d')
        assert np.allclose(np.mean(data), 1.3957075246029751e-05, rtol=limit, atol=0.)
        assert data.shape == (1, 10, 9)

    def test_apply_function_to_images_same_port(self):
        """In-place processing (input tag == output tag) works when the image
        shape is unchanged, but raises when the shape would change with a
        finite MEMORY setting."""

        dark = DarkCalibrationModule(name_in='dark1',
                                     image_in_tag='science',
                                     dark_in_tag='dark',
                                     image_out_tag='science')

        self.pipeline.add_module(dark)
        self.pipeline.run_module('dark1')

        data = self.pipeline.get_data('science')
        assert np.allclose(np.mean(data), -3.190113568690675e-06, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        # MEMORY=0 means all frames are processed at once (MEMORY=None).
        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        dark = DarkCalibrationModule(name_in='dark2',
                                     image_in_tag='science',
                                     dark_in_tag='dark',
                                     image_out_tag='science')

        self.pipeline.add_module(dark)
        self.pipeline.run_module('dark2')

        data = self.pipeline.get_data('science')
        assert np.allclose(np.mean(data), -1.026073475228737e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove3',
                                   image_in_tag='remove_3d',
                                   image_out_tag='remove_3d')

        self.pipeline.add_module(remove)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('remove3')

        assert str(error.value) == 'Input and output port have the same tag while the input ' \
                                   'function is changing the image shape. This is only ' \
                                   'possible with MEMORY=None.'

    def test_apply_function_to_images_memory_none(self):
        """Shape-changing function with unlimited memory (MEMORY still 0 from
        the previous test) works with distinct input/output tags."""

        remove = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove4',
                                   image_in_tag='image_3d',
                                   image_out_tag='remove_3d_none')

        self.pipeline.add_module(remove)
        self.pipeline.run_module('remove4')

        data = self.pipeline.get_data('remove_3d_none')
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_3d_args(self):
        """Image scaling with extra arguments on a 3D stack; the PIXSCALE
        attribute is rescaled by the spatial scaling factor."""

        self.pipeline.set_attribute('config', 'MEMORY', 1, static=True)
        self.pipeline.set_attribute('image_3d', 'PIXSCALE', 0.1, static=True)

        scale = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                  pixscale=True,
                                  name_in='scale1',
                                  image_in_tag='image_3d',
                                  image_out_tag='scale_3d')

        self.pipeline.add_module(scale)
        self.pipeline.run_module('scale1')

        data = self.pipeline.get_data('scale_3d')
        assert np.allclose(np.mean(data), 7.042953308754017e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 12, 12)

        # 0.1 / 1.2 = 0.0833...
        attribute = self.pipeline.get_attribute('scale_3d', 'PIXSCALE', static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)

    def test_apply_function_to_images_2d_args(self):
        """Same scaling test on a single-frame (2D) stack."""

        self.pipeline.set_attribute('image_2d', 'PIXSCALE', 0.1, static=True)

        scale = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                  pixscale=True,
                                  name_in='scale2',
                                  image_in_tag='image_2d',
                                  image_out_tag='scale_2d')

        self.pipeline.add_module(scale)
        self.pipeline.run_module('scale2')

        data = self.pipeline.get_data('scale_2d')
        assert np.allclose(np.mean(data), 8.937141109641279e-05, rtol=limit, atol=0.)
        assert data.shape == (1, 12, 12)

        attribute = self.pipeline.get_attribute('scale_2d', 'PIXSCALE', static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)
class TestFrameSelection(object):
    """Tests for frame removal and selection modules. Tests run in order;
    each reads the tag written by the previous test."""

    def setup_class(self):
        """Create synthetic data with NFRAMES=11 but NDIT=10 (an extra frame
        per cube) and open a pipeline."""

        self.test_dir = os.path.dirname(__file__) + "/"

        create_star_data(path=self.test_dir+"images", ndit=10, nframes=11)
        create_config(self.test_dir+"PynPoint_config.ini")

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        """Remove the test data created by setup_class."""

        remove_test_data(self.test_dir, folders=["images"])

    def test_read_data(self):
        """Read the 4x11 = 44 frames into the 'read' tag."""

        read = FitsReadingModule(name_in="read",
                                 image_tag="read",
                                 input_dir=self.test_dir+"images",
                                 overwrite=True,
                                 check=True)

        self.pipeline.add_module(read)
        self.pipeline.run_module("read")

        data = self.pipeline.get_data("read")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0001002167910262529, rtol=limit, atol=0.)
        assert data.shape == (44, 100, 100)

    def test_remove_last_frame(self):
        """Drop the last frame of each cube (44 -> 40 frames), then attach
        PARANG and STAR_POSITION attributes used by the following tests."""

        last = RemoveLastFrameModule(name_in="last",
                                     image_in_tag="read",
                                     image_out_tag="last")

        self.pipeline.add_module(last)
        self.pipeline.run_module("last")

        data = self.pipeline.get_data("last")
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010020258903646778, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        self.pipeline.set_attribute("last", "PARANG", np.arange(0., 40., 1.), static=False)

        star = np.zeros((40, 2))
        star[:, 0] = np.arange(40., 80., 1.)
        star[:, 1] = np.arange(40., 80., 1.)

        self.pipeline.set_attribute("last", "STAR_POSITION", star, static=False)

        attribute = self.pipeline.get_attribute("last", "PARANG", static=False)
        assert np.allclose(np.mean(attribute), 19.5, rtol=limit, atol=0.)
        assert attribute.shape == (40, )

        attribute = self.pipeline.get_attribute("last", "STAR_POSITION", static=False)
        assert np.allclose(np.mean(attribute), 59.5, rtol=limit, atol=0.)
        assert attribute.shape == (40, 2)

    def test_remove_start_frame(self):
        """Drop the first two frames of each cube (40 -> 32 frames) and check
        that the attributes are trimmed consistently."""

        start = RemoveStartFramesModule(frames=2,
                                        name_in="start",
                                        image_in_tag="last",
                                        image_out_tag="start")

        self.pipeline.add_module(start)
        self.pipeline.run_module("start")

        data = self.pipeline.get_data("start")
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010011298467340513, rtol=limit, atol=0.)
        assert data.shape == (32, 100, 100)

        attribute = self.pipeline.get_attribute("start", "PARANG", static=False)
        assert np.allclose(np.mean(attribute), 20.5, rtol=limit, atol=0.)
        assert attribute.shape == (32, )

        attribute = self.pipeline.get_attribute("start", "STAR_POSITION", static=False)
        assert np.allclose(np.mean(attribute), 60.5, rtol=limit, atol=0.)
        assert attribute.shape == (32, 2)

    def test_remove_frames(self):
        """Remove five explicit frame indices and check both the selected and
        removed outputs, including their attributes."""

        remove = RemoveFramesModule(frames=(5, 8, 13, 25, 31),
                                    name_in="remove",
                                    image_in_tag="start",
                                    selected_out_tag="selected",
                                    removed_out_tag="removed")

        self.pipeline.add_module(remove)
        self.pipeline.run_module("remove")

        data = self.pipeline.get_data("selected")
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.984682304434105e-05, rtol=limit, atol=0.)
        assert data.shape == (27, 100, 100)

        data = self.pipeline.get_data("removed")
        assert np.allclose(data[0, 50, 50], 0.09818692015286978, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010155025747035087, rtol=limit, atol=0.)
        assert data.shape == (5, 100, 100)

        attribute = self.pipeline.get_attribute("selected", "PARANG", static=False)
        assert np.allclose(np.mean(attribute), 20.296296296296298, rtol=limit, atol=0.)
        assert attribute.shape == (27, )

        attribute = self.pipeline.get_attribute("selected", "STAR_POSITION", static=False)
        assert np.allclose(np.mean(attribute), 60.2962962962963, rtol=limit, atol=0.)
        assert attribute.shape == (27, 2)

        attribute = self.pipeline.get_attribute("removed", "PARANG", static=False)
        assert np.allclose(np.mean(attribute), 21.6, rtol=limit, atol=0.)
        assert attribute.shape == (5, )

        attribute = self.pipeline.get_attribute("removed", "STAR_POSITION", static=False)
        assert np.allclose(np.mean(attribute), 61.6, rtol=limit, atol=0.)
        assert attribute.shape == (5, 2)

    def test_frame_selection(self):
        """Select frames by photometric quality, first with the 'median'
        method and a circular aperture, then with 'max' and an annulus."""

        select = FrameSelectionModule(name_in="select1",
                                      image_in_tag="start",
                                      selected_out_tag="selected1",
                                      removed_out_tag="removed1",
                                      index_out_tag="index1",
                                      method="median",
                                      threshold=1.,
                                      fwhm=0.1,
                                      aperture=("circular", 0.2),
                                      position=(None, None, 0.5))

        self.pipeline.add_module(select)
        self.pipeline.run_module("select1")

        data = self.pipeline.get_data("selected1")
        assert np.allclose(data[0, 50, 50], 0.09791350617182591, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.980792188317311e-05, rtol=limit, atol=0.)
        assert data.shape == (22, 100, 100)

        data = self.pipeline.get_data("removed1")
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010078412281191547, rtol=limit, atol=0.)
        assert data.shape == (10, 100, 100)

        data = self.pipeline.get_data("index1")
        assert data[-1] == 28
        assert np.sum(data) == 115
        assert data.shape == (10, )

        attribute = self.pipeline.get_attribute("selected1", "PARANG", static=False)
        assert np.allclose(np.mean(attribute), 22.681818181818183, rtol=limit, atol=0.)
        assert attribute.shape == (22, )

        attribute = self.pipeline.get_attribute("selected1", "STAR_POSITION", static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (22, 2)

        attribute = self.pipeline.get_attribute("removed1", "PARANG", static=False)
        assert np.allclose(np.mean(attribute), 15.7, rtol=limit, atol=0.)
        assert attribute.shape == (10, )

        attribute = self.pipeline.get_attribute("removed1", "STAR_POSITION", static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (10, 2)

        select = FrameSelectionModule(name_in="select2",
                                      image_in_tag="start",
                                      selected_out_tag="selected2",
                                      removed_out_tag="removed2",
                                      index_out_tag="index2",
                                      method="max",
                                      threshold=3.,
                                      fwhm=0.1,
                                      aperture=("annulus", 0.1, 0.2),
                                      position=None)

        self.pipeline.add_module(select)
        self.pipeline.run_module("select2")

        data = self.pipeline.get_data("selected2")
        assert np.allclose(data[0, 50, 50], 0.09797376304048713, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010037996502199598, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data("removed2")
        assert np.allclose(data[0, 50, 50], 0.097912284606689, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.966801742575358e-05, rtol=limit, atol=0.)
        assert data.shape == (12, 100, 100)

        data = self.pipeline.get_data("index2")
        assert data[-1] == 30
        assert np.sum(data) == 230
        assert data.shape == (12, )

        attribute = self.pipeline.get_attribute("selected2", "PARANG", static=False)
        assert np.allclose(np.mean(attribute), 17.8, rtol=limit, atol=0.)
        assert attribute.shape == (20, )

        attribute = self.pipeline.get_attribute("selected2", "STAR_POSITION", static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (20, 2)

        attribute = self.pipeline.get_attribute("removed2", "PARANG", static=False)
        assert np.allclose(np.mean(attribute), 25.0, rtol=limit, atol=0.)
        assert attribute.shape == (12, )

        attribute = self.pipeline.get_attribute("removed2", "STAR_POSITION", static=False)
        assert np.allclose(np.mean(attribute), 50.0, rtol=limit, atol=0.)
        assert attribute.shape == (12, 2)
class TestPsfSubtraction:
    """
    Tests for classical ADI and PCA-based PSF subtraction, run against fake
    science and reference data sets created in ``setup_class``.

    Fix applied in this revision: ``test_psf_subtraction_pca_multi_mask`` and
    ``test_psf_subtraction_len_parang`` opened the HDF5 database with a bare
    ``h5py.File(...)`` and never closed the handle (a resource leak, and
    inconsistent with ``test_psf_subtraction_pca_multi`` which already uses a
    context manager). Both now use ``with`` blocks; behavior is otherwise
    unchanged.
    """

    def setup_class(self) -> None:
        """Create the fake science/reference data and the pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'

        # Science cube: 4 exposures x 20 frames with an injected companion
        # (sep=10 px, contrast=3e-3).
        create_fake(path=self.test_dir + 'science',
                    ndit=[20, 20, 20, 20],
                    nframes=[20, 20, 20, 20],
                    exp_no=[1, 2, 3, 4],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=10.,
                    contrast=3e-3)

        # Reference cube: 4 exposures x 10 frames without a companion.
        create_fake(path=self.test_dir + 'reference',
                    ndit=[10, 10, 10, 10],
                    nframes=[10, 10, 10, 10],
                    exp_no=[1, 2, 3, 4],
                    npix=(100, 100),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=None,
                    contrast=None)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the temporary data created for this test class."""

        remove_test_data(self.test_dir, folders=['science', 'reference'])

    def test_read_data(self) -> None:
        """Read the science and reference FITS files into the database."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='science',
                                   input_dir=self.test_dir + 'science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('science')
        assert np.allclose(data[0, 50, 50], 0.09798413502193708, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010063896953157961, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='reference',
                                   input_dir=self.test_dir + 'reference')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('reference')
        assert np.allclose(data[0, 50, 50], 0.09798413502193708, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_angle_interpolation(self) -> None:
        """Interpolate the parallactic angles of the science data."""

        module = AngleInterpolationModule(name_in='angle',
                                          data_tag='science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        data = self.pipeline.get_data('header_science/PARANG')
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 19.736842105263158, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

    def test_psf_preparation(self) -> None:
        """Mask the science and reference images (central + edge mask)."""

        module = PSFpreparationModule(name_in='prep1',
                                      image_in_tag='science',
                                      image_out_tag='science_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.2,
                                      edge_size=1.0)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep1')

        data = self.pipeline.get_data('science_prep')
        assert np.allclose(data[0, 0, 0], 0.0, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0.0, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 4.534001223501053e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        module = PSFpreparationModule(name_in='prep2',
                                      image_in_tag='reference',
                                      image_out_tag='reference_prep',
                                      mask_out_tag=None,
                                      norm=False,
                                      resize=None,
                                      cent_size=0.2,
                                      edge_size=1.0)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep2')

        data = self.pipeline.get_data('reference_prep')
        assert np.allclose(data[0, 0, 0], 0.0, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0.0, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.227592050148539e-07, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_classical_adi(self) -> None:
        """Classical ADI without a rotation threshold, mean residuals."""

        module = ClassicalADIModule(threshold=None,
                                    nreference=None,
                                    residuals='mean',
                                    extra_rot=0.,
                                    name_in='cadi1',
                                    image_in_tag='science',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi1')

        data = self.pipeline.get_data('cadi_res')
        assert np.allclose(np.mean(data), -6.359018260066029e-08, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('cadi_stack')
        assert np.allclose(np.mean(data), -8.318786331552922e-08, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_classical_adi_threshold(self) -> None:
        """Classical ADI with a rotation threshold, median residuals."""

        module = ClassicalADIModule(threshold=(0.1, 0.03, 1.),
                                    nreference=5,
                                    residuals='median',
                                    extra_rot=0.,
                                    name_in='cadi2',
                                    image_in_tag='science',
                                    res_out_tag='cadi_res',
                                    stack_out_tag='cadi_stack')

        self.pipeline.add_module(module)
        self.pipeline.run_module('cadi2')

        data = self.pipeline.get_data('cadi_res')
        assert np.allclose(np.mean(data), 1.6523183877608216e-07, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('cadi_stack')
        assert np.allclose(np.mean(data), 1.413437242880268e-07, rtol=limit, atol=0.)
        assert data.shape == (1, 100, 100)

    def test_psf_subtraction_pca_single(self) -> None:
        """Single-process PCA with all residual outputs enabled."""

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_single',
            images_in_tag='science',
            reference_in_tag='science',
            res_mean_tag='res_mean_single',
            res_median_tag='res_median_single',
            res_weighted_tag='res_weighted_single',
            res_rot_mean_clip_tag='res_clip_single',
            res_arr_out_tag='res_arr_single',
            basis_out_tag='basis_single',
            extra_rot=-15.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_single')

        data = self.pipeline.get_data('res_mean_single')
        assert np.allclose(np.mean(data), 2.6959819771522928e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_median_single')
        assert np.allclose(np.mean(data), -2.4142571236920345e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_weighted_single')
        assert np.allclose(np.mean(data), -5.293559651636843e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_clip_single')
        assert np.allclose(np.mean(data), 2.6199554737979536e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        # Residuals of the 5-component fit (tag is suffixed with the number
        # of principal components).
        data = self.pipeline.get_data('res_arr_single5')
        assert np.allclose(np.mean(data), 3.184676024912723e-08, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('basis_single')
        assert np.allclose(np.mean(data), -1.593245396350998e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_no_mean(self) -> None:
        """PCA without mean subtraction (science as its own reference)."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='science',
                                         res_mean_tag='res_mean_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean')

        data = self.pipeline.get_data('res_mean_no_mean')
        assert np.allclose(np.mean(data), 2.413203757426239e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_no_mean')
        assert np.allclose(np.mean(data), 7.4728664805632875e-06, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref(self) -> None:
        """PCA with a separate reference library, mean subtracted."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref')

        data = self.pipeline.get_data('res_mean_ref')
        assert np.allclose(np.mean(data), 1.1662201512335965e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref')
        assert np.allclose(np.mean(data), -1.6780507257603104e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_no_mean(self) -> None:
        """PCA with a separate reference library, mean not subtracted."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref_no_mean',
                                         images_in_tag='science',
                                         reference_in_tag='reference',
                                         res_mean_tag='res_mean_ref_no_mean',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_no_mean',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean')

        data = self.pipeline.get_data('res_mean_ref_no_mean')
        assert np.allclose(np.mean(data), 3.7029738044199534e-07, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_no_mean')
        assert np.allclose(np.mean(data), 2.3755682312090375e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_pca_single_mask(self) -> None:
        """Single-process PCA on the masked (prepared) images."""

        pca = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_single_mask',
            images_in_tag='science_prep',
            reference_in_tag='science_prep',
            res_mean_tag='res_mean_single_mask',
            res_median_tag='res_median_single_mask',
            res_weighted_tag='res_weighted_single_mask',
            res_rot_mean_clip_tag='res_clip_single_mask',
            res_arr_out_tag='res_arr_single_mask',
            basis_out_tag='basis_single_mask',
            extra_rot=-15.,
            subtract_mean=True)

        self.pipeline.add_module(pca)
        self.pipeline.run_module('pca_single_mask')

        data = self.pipeline.get_data('res_mean_single_mask')
        assert np.allclose(np.mean(data), -1.6536519510012155e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_median_single_mask')
        assert np.allclose(np.mean(data), 5.6094356668078245e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_weighted_single_mask')
        assert np.allclose(np.mean(data), 4.7079857263662695e-08, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_clip_single_mask')
        assert np.allclose(np.mean(data), -4.875856901892831e-10, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('res_arr_single_mask5')
        assert np.allclose(np.mean(data), -1.700674890172441e-09, rtol=limit, atol=0.)
        assert data.shape == (80, 100, 100)

        data = self.pipeline.get_data('basis_single_mask')
        assert np.allclose(np.mean(data), 5.584100479595007e-06, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_no_mean_mask(self) -> None:
        """PCA on the masked images without mean subtraction."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_no_mean_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_no_mean_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_no_mean_mask',
                                         extra_rot=0.,
                                         subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_no_mean_mask')

        data = self.pipeline.get_data('res_mean_no_mean_mask')
        assert np.allclose(np.mean(data), -1.0905008724474168e-09, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_no_mean_mask')
        assert np.allclose(np.sum(np.abs(data)), 1025.2018448288406, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_mask(self) -> None:
        """PCA on the masked images with the masked reference library."""

        module = PcaPsfSubtractionModule(pca_numbers=range(1, 21),
                                         name_in='pca_ref_mask',
                                         images_in_tag='science_prep',
                                         reference_in_tag='reference_prep',
                                         res_mean_tag='res_mean_ref_mask',
                                         res_median_tag=None,
                                         res_weighted_tag=None,
                                         res_rot_mean_clip_tag=None,
                                         res_arr_out_tag=None,
                                         basis_out_tag='basis_ref_mask',
                                         extra_rot=0.,
                                         subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_mask')

        data = self.pipeline.get_data('res_mean_ref_mask')
        assert np.allclose(np.mean(data), -9.962692629500833e-10, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_mask')
        assert np.allclose(np.mean(data), -2.3165670099810983e-05, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_ref_no_mean_mask(self) -> None:
        """PCA on masked images, masked reference, mean not subtracted."""

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_ref_no_mean_mask',
            images_in_tag='science_prep',
            reference_in_tag='reference_prep',
            res_mean_tag='res_mean_ref_no_mean_mask',
            res_median_tag=None,
            res_weighted_tag=None,
            res_rot_mean_clip_tag=None,
            res_arr_out_tag=None,
            basis_out_tag='basis_ref_no_mean_mask',
            extra_rot=0.,
            subtract_mean=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_ref_no_mean_mask')

        data = self.pipeline.get_data('res_mean_ref_no_mean_mask')
        assert np.allclose(np.mean(data), 3.848255803450399e-07, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

        data = self.pipeline.get_data('basis_ref_no_mean_mask')
        assert np.allclose(np.sum(np.abs(data)), 1026.3329224435665, rtol=limit, atol=0.)
        assert data.shape == (20, 100, 100)

    def test_psf_subtraction_pca_multi(self) -> None:
        """Multiprocessing PCA must match the single-process results."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_multi',
            images_in_tag='science',
            reference_in_tag='science',
            res_mean_tag='res_mean_multi',
            res_median_tag='res_median_multi',
            res_weighted_tag='res_weighted_multi',
            res_rot_mean_clip_tag='res_clip_multi',
            res_arr_out_tag=None,
            basis_out_tag='basis_multi',
            extra_rot=-15.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi')

        # Looser tolerances than `limit`: single- and multi-process results
        # agree only up to floating-point summation order.
        data_single = self.pipeline.get_data('res_mean_single')
        data_multi = self.pipeline.get_data('res_mean_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_median_single')
        data_multi = self.pipeline.get_data('res_median_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_weighted_single')
        data_multi = self.pipeline.get_data('res_weighted_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-6, atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('basis_single')
        data_multi = self.pipeline.get_data('basis_multi')
        assert np.allclose(data_single, data_multi, rtol=1e-5, atol=0.)
        assert data_single.shape == data_multi.shape

    def test_psf_subtraction_pca_multi_mask(self) -> None:
        """Multiprocessing PCA on masked images matches single-process."""

        # Fixed: use a context manager so the HDF5 handle is closed before the
        # pipeline reopens the database (previously the file was left open).
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as database:
            database['config'].attrs['CPU'] = 4

        module = PcaPsfSubtractionModule(
            pca_numbers=range(1, 21),
            name_in='pca_multi_mask',
            images_in_tag='science_prep',
            reference_in_tag='science_prep',
            res_mean_tag='res_mean_multi_mask',
            res_median_tag='res_median_multi_mask',
            res_weighted_tag='res_weighted_multi_mask',
            res_rot_mean_clip_tag='res_clip_multi_mask',
            res_arr_out_tag=None,
            basis_out_tag='basis_multi_mask',
            extra_rot=-15.,
            subtract_mean=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca_multi_mask')

        # Compare only pixels outside the mask (values above 1e-12).
        data_single = self.pipeline.get_data('res_mean_single_mask')
        data_multi = self.pipeline.get_data('res_mean_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_median_single_mask')
        data_multi = self.pipeline.get_data('res_median_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('res_weighted_single_mask')
        data_multi = self.pipeline.get_data('res_weighted_multi_mask')
        assert np.allclose(data_single[data_single > 1e-12],
                           data_multi[data_multi > 1e-12],
                           rtol=1e-6,
                           atol=0.)
        assert data_single.shape == data_multi.shape

        data_single = self.pipeline.get_data('basis_single_mask')
        data_multi = self.pipeline.get_data('basis_multi_mask')
        assert np.allclose(data_single, data_multi, rtol=1e-5, atol=0.)
        assert data_single.shape == data_multi.shape

    def test_psf_subtraction_len_parang(self) -> None:
        """A PARANG/frame-count mismatch must raise a ValueError."""

        # Fixed: use a context manager so the HDF5 handle is closed before the
        # pipeline reopens the database (previously the file was left open).
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as database:
            database['config'].attrs['CPU'] = 1

        parang = self.pipeline.get_data('header_science/PARANG')

        # Append one extra angle so 81 angles face 80 images.
        self.pipeline.set_attribute('science_prep',
                                    'PARANG',
                                    np.append(parang, 0.),
                                    static=False)

        module = PcaPsfSubtractionModule(pca_numbers=[5, ],
                                         name_in='pca_len_parang',
                                         images_in_tag='science_prep',
                                         reference_in_tag='science_prep',
                                         res_mean_tag='res_mean_len_parang',
                                         extra_rot=0.)

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('pca_len_parang')

        assert str(error.value) == 'The number of images (80) is not equal to the number of ' \
                                   'parallactic angles (81).'
class TestProcessing:
    """
    Tests for the apply-function machinery of ProcessingModule (memory
    chunking, multiprocessing, same-port in/out) on small random data sets.

    Fix applied in this revision: ``test_output_port_set_connection`` assigned
    the return value of ``add_output_port`` to an unused local (``port``); the
    dead assignment is removed while the call itself is kept.
    """

    def setup_class(self) -> None:
        """Create the HDF5 database, test data, and the pipeline."""

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        # Fixed seed so the numeric assertions below are reproducible.
        np.random.seed(1)

        images = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11))
        large_data = np.random.normal(loc=0, scale=2e-4, size=(10000, 5, 5))

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'w') as hdf_file:
            hdf_file.create_dataset('images', data=images)
            hdf_file.create_dataset('large_data', data=large_data)

        create_star_data(path=self.test_dir + 'images')

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

        self.pipeline.set_attribute('images', 'PIXSCALE', 0.1, static=True)
        self.pipeline.set_attribute('large_data', 'PIXSCALE', 0.1, static=True)

    def teardown_class(self) -> None:
        """Remove the temporary data created for this test class."""

        remove_test_data(self.test_dir, folders=['images'])

    def test_output_port_name(self) -> None:
        """Adding a duplicate output-port tag must raise a UserWarning."""

        module = FitsReadingModule(name_in='read',
                                   image_tag='images',
                                   input_dir=self.test_dir + 'images')

        module.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            module.add_output_port('test')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Tag \'test\' of ReadingModule \'read\' is already ' \
                                             'used.'

        module = BadPixelSigmaFilterModule(name_in='badpixel',
                                           image_in_tag='images',
                                           image_out_tag='im_out')

        module.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            module.add_output_port('test')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'Tag \'test\' of ProcessingModule \'badpixel\' is ' \
                                             'already used.'

    def test_output_port_set_connection(self) -> None:
        """Adding an output port while the storage connection is open."""

        self.pipeline.m_data_storage.open_connection()

        module = BadPixelSigmaFilterModule(name_in='badpixel2',
                                           image_in_tag='images',
                                           image_out_tag='im_out')

        self.pipeline.add_module(module)

        # The return value is not needed; only the side effect matters.
        module.add_output_port('test1')

        self.pipeline.m_data_storage.close_connection()

    def test_apply_function(self) -> None:
        """apply_function with memory chunking and 4 CPUs."""

        self.pipeline.set_attribute('config', 'MEMORY', 20, static=True)
        self.pipeline.set_attribute('config', 'CPU', 4, static=True)

        module = LineSubtractionModule(name_in='subtract',
                                       image_in_tag='images',
                                       image_out_tag='im_subtract',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract')

        data = self.pipeline.get_data('images')
        assert np.mean(data) == pytest.approx(1.1824138000882435e-05, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

        data = self.pipeline.get_data('im_subtract')
        assert np.mean(data) == pytest.approx(-1.2544487946113274e-21, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

    def test_apply_function_args_none(self) -> None:
        """apply_function without extra arguments."""

        module = TimeNormalizationModule(name_in='norm',
                                         image_in_tag='images',
                                         image_out_tag='im_norm')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm')

        data = self.pipeline.get_data('im_norm')
        assert np.mean(data) == pytest.approx(2.4012571778516812e-06, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

    def test_apply_function_args_none_memory_none(self) -> None:
        """apply_function without arguments and without memory chunking."""

        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        module = TimeNormalizationModule(name_in='norm_none',
                                         image_in_tag='images',
                                         image_out_tag='im_norm')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm_none')

        data = self.pipeline.get_data('im_norm')
        assert np.mean(data) == pytest.approx(2.4012571778516812e-06, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

    def test_apply_function_same_port(self) -> None:
        """apply_function writing back to its own input tag."""

        module = LineSubtractionModule(name_in='subtract_same',
                                       image_in_tag='im_subtract',
                                       image_out_tag='im_subtract',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract_same')

        data = self.pipeline.get_data('im_subtract')
        assert np.mean(data) == pytest.approx(-1.4336557652700885e-21, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

    def test_apply_function_args_none_memory_none_same_port(self) -> None:
        """Same-port apply_function without arguments or memory chunking."""

        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        data = self.pipeline.get_data('images')
        assert np.mean(data) == pytest.approx(1.1824138000882435e-05, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

        module = TimeNormalizationModule(name_in='norm_none_same',
                                         image_in_tag='images',
                                         image_out_tag='images')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm_none_same')

        data = self.pipeline.get_data('images')
        assert np.mean(data) == pytest.approx(2.4012571778516812e-06, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

    def test_apply_function_to_images_memory_none(self) -> None:
        """apply_function_to_images without memory chunking."""

        module = StarExtractionModule(name_in='extract',
                                      image_in_tag='im_subtract',
                                      image_out_tag='extract',
                                      index_out_tag=None,
                                      image_size=0.5,
                                      fwhm_star=0.1,
                                      position=(None, None, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract')

        data = self.pipeline.get_data('extract')
        assert np.mean(data) == pytest.approx(1.8259937251367536e-05, rel=self.limit, abs=0.)
        assert data.shape == (5, 5, 5)
class TestPsfPreparation:
    """
    Tests for parallactic-angle interpolation/calculation and for the
    PSFpreparationModule / SDIpreparationModule on fake star data.
    """

    def setup_class(self):
        # Create the fake data, configuration, and pipeline for this class.
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'prep')

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self):
        # Remove the temporary data created for this test class.
        remove_test_data(self.test_dir, folders=['prep'])

    def test_read_data(self):
        # Read the fake FITS files into the 'read' tag.
        module = FitsReadingModule(name_in='read',
                                   image_tag='read',
                                   input_dir=self.test_dir + 'prep')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('read')
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_angle_interpolation(self):
        # Linear interpolation of PARANG over the NFRAMES of each cube.
        module = AngleInterpolationModule(name_in='angle1',
                                          data_tag='read')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle1')

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 7.777777777777778, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_angle_calculation(self):
        # Calculate PARANG from telescope/pointing attributes for NACO and
        # SPHERE; the SPHERE branches warn about the RA/DEC keyword format.
        self.pipeline.set_attribute('read', 'LATITUDE', -25.)
        self.pipeline.set_attribute('read', 'LONGITUDE', -70.)
        self.pipeline.set_attribute('read', 'DIT', 1.)

        self.pipeline.set_attribute('read', 'RA', (90., 90., 90., 90.), static=False)
        self.pipeline.set_attribute('read', 'DEC', (-51., -51., -51., -51.), static=False)
        self.pipeline.set_attribute('read', 'PUPIL', (90., 90., 90., 90.), static=False)

        date = ('2012-12-01T07:09:00.0000', '2012-12-01T07:09:01.0000',
                '2012-12-01T07:09:02.0000', '2012-12-01T07:09:03.0000')
        self.pipeline.set_attribute('read', 'DATE', date, static=False)

        module = AngleCalculationModule(instrument='NACO',
                                        name_in='angle2',
                                        data_tag='read')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle2')

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.allclose(data[0], -55.04109770947442, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), -54.99858360618869, rtol=limit, atol=0.)
        assert data.shape == (40, )

        # SPHERE expects RA/DEC in the HHMMSS.S / DDMMSS.S header format.
        self.pipeline.set_attribute('read', 'RA', (60000.0, 60000.0, 60000.0, 60000.0),
                                    static=False)

        self.pipeline.set_attribute('read', 'DEC', (-510000., -510000., -510000., -510000.),
                                    static=False)

        module = AngleCalculationModule(instrument='SPHERE/IRDIS',
                                        name_in='angle3',
                                        data_tag='read')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('angle3')

        assert warning[0].message.args[0] == 'For SPHERE data it is recommended to use the ' \
                                             'header keyword \'ESO INS4 DROT2 RA\' to specify ' \
                                             'the object\'s right ascension. The input will be ' \
                                             'parsed accordingly. Using the regular \'RA\' '\
                                             'keyword will lead to wrong parallactic angles.'

        assert warning[1].message.args[0] == 'For SPHERE data it is recommended to use the ' \
                                             'header keyword \'ESO INS4 DROT2 DEC\' to specify ' \
                                             'the object\'s declination. The input will be ' \
                                             'parsed accordingly. Using the regular \'DEC\' '\
                                             'keyword will lead to wrong parallactic angles.'

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.allclose(data[0], 170.39102715170227, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 170.46341123194824, rtol=limit, atol=0.)
        assert data.shape == (40, )

        module = AngleCalculationModule(instrument='SPHERE/IFS',
                                        name_in='angle4',
                                        data_tag='read')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('angle4')

        assert warning[0].message.args[0] == 'AngleCalculationModule has not been tested for ' \
                                             'SPHERE/IFS data.'

        assert warning[1].message.args[0] == 'For SPHERE data it is recommended to use the ' \
                                             'header keyword \'ESO INS4 DROT2 RA\' to specify ' \
                                             'the object\'s right ascension. The input will be ' \
                                             'parsed accordingly. Using the regular \'RA\' '\
                                             'keyword will lead to wrong parallactic angles.'

        assert warning[2].message.args[0] == 'For SPHERE data it is recommended to use the ' \
                                             'header keyword \'ESO INS4 DROT2 DEC\' to specify ' \
                                             'the object\'s declination. The input will be ' \
                                             'parsed accordingly. Using the regular \'DEC\' '\
                                             'keyword will lead to wrong parallactic angles.'

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.allclose(data[0], -89.12897284829768, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), -89.02755918786514, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_angle_interpolation_mismatch(self):
        # A NDIT/NFRAMES mismatch must warn but still interpolate via NFRAMES.
        self.pipeline.set_attribute('read', 'NDIT', [9, 9, 9, 9], static=False)

        module = AngleInterpolationModule(name_in='angle5',
                                          data_tag='read')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('angle5')

        assert len(warning) == 1
        assert warning[0].message.args[0] == 'There is a mismatch between the NDIT and NFRAMES ' \
                                             'values. The parallactic angles are calculated ' \
                                             'with a linear interpolation by using NFRAMES ' \
                                             'steps. A frame selection should be applied ' \
                                             'after the parallactic angles are calculated.'

        data = self.pipeline.get_data('header_read/PARANG')
        assert np.allclose(data[0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[15], 7.777777777777778, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 10.0, rtol=limit, atol=0.)
        assert data.shape == (40, )

    def test_psf_preparation_norm_mask(self):
        # Normalization plus central/edge mask; the 2D mask is also stored.
        module = PSFpreparationModule(name_in='prep1',
                                      image_in_tag='read',
                                      image_out_tag='prep1',
                                      mask_out_tag='mask1',
                                      norm=True,
                                      cent_size=0.1,
                                      edge_size=1.0)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep1')

        data = self.pipeline.get_data('prep1')
        assert np.allclose(data[0, 0, 0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], 0., rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0001690382058762809, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        data = self.pipeline.get_data('mask1')
        assert np.allclose(data[0, 0], 0., rtol=limit, atol=0.)
        assert np.allclose(data[99, 99], 0., rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.4268, rtol=limit, atol=0.)
        assert data.shape == (100, 100)

    def test_psf_preparation_none(self):
        # No normalization and no masking: output equals the input data.
        module = PSFpreparationModule(name_in='prep2',
                                      image_in_tag='read',
                                      image_out_tag='prep2',
                                      mask_out_tag='mask2',
                                      norm=False,
                                      cent_size=None,
                                      edge_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep2')

        data = self.pipeline.get_data('prep2')
        assert np.allclose(data[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], -0.000287573978535779, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_psf_preparation_no_mask_out(self):
        # Same as above but without storing the mask.
        module = PSFpreparationModule(name_in='prep3',
                                      image_in_tag='read',
                                      image_out_tag='prep3',
                                      mask_out_tag=None,
                                      norm=False,
                                      cent_size=None,
                                      edge_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('prep3')

        data = self.pipeline.get_data('prep3')
        assert np.allclose(data[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
        assert np.allclose(data[0, 25, 25], 2.0926464668090656e-05, rtol=limit, atol=0.)
        assert np.allclose(data[0, 99, 99], -0.000287573978535779, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_sdi_preparation(self):
        # Rescale images between two wavelengths for SDI; the applied
        # wavelength pair is recorded in the dataset history.
        module = SDIpreparationModule(name_in='sdi',
                                      wavelength=(0.65, 0.6),
                                      width=(0.1, 0.5),
                                      image_in_tag='read',
                                      image_out_tag='sdi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('sdi')

        data = self.pipeline.get_data('sdi')
        assert np.allclose(data[0, 25, 25], -2.6648118007008814e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 2.0042892634995876e-05, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

        attribute = self.pipeline.get_attribute('sdi', 'History: SDIpreparationModule')
        assert attribute == '(line, continuum) = (0.65, 0.6)'
class TestProcessing:
    """Tests for applying functions to images through ProcessingModule,
    with different MEMORY settings and with single/multiprocessing."""

    def setup_class(self) -> None:
        """Create a seeded random HDF5 database, FITS test data, and the pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'

        # Fixed seed so the numerical assertions below are reproducible
        np.random.seed(1)

        images = np.random.normal(loc=0, scale=2e-4, size=(100, 10, 10))
        large_data = np.random.normal(loc=0, scale=2e-4, size=(10000, 100, 100))

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'w') as hdf_file:
            hdf_file.create_dataset('images', data=images)
            hdf_file.create_dataset('large_data', data=large_data)

        create_star_data(path=self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

        self.pipeline.set_attribute('images', 'PIXSCALE', 0.1, static=True)
        self.pipeline.set_attribute('large_data', 'PIXSCALE', 0.1, static=True)

    def teardown_class(self) -> None:
        """Remove the test data created in setup_class."""

        remove_test_data(self.test_dir, folders=['images'])

    def test_output_port_name(self) -> None:
        """Adding a duplicate output port tag warns for both module types."""

        module = FitsReadingModule(name_in='read',
                                   image_tag='images',
                                   input_dir=self.test_dir + 'images')

        module.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            module.add_output_port('test')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Tag \'test\' of ReadingModule \'read\' is already ' \
                                             'used.'

        module = BadPixelSigmaFilterModule(name_in='badpixel',
                                           image_in_tag='images',
                                           image_out_tag='im_out')

        module.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            module.add_output_port('test')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Tag \'test\' of ProcessingModule \'badpixel\' is ' \
                                             'already used.'

        self.pipeline.m_data_storage.close_connection()

    def test_apply_function(self) -> None:
        """Apply a function to the images with MEMORY=20 and CPU=4."""

        self.pipeline.set_attribute('config', 'MEMORY', 20, static=True)
        self.pipeline.set_attribute('config', 'CPU', 4, static=True)

        module = LineSubtractionModule(name_in='subtract',
                                       image_in_tag='images',
                                       image_out_tag='im_subtract',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract')

        data = self.pipeline.get_data('images')
        assert np.allclose(np.mean(data), 1.9545313398209947e-06, rtol=limit, atol=0.)
        assert data.shape == (100, 10, 10)

        data = self.pipeline.get_data('im_subtract')
        # Mean-subtracted data should average to (numerically) zero
        assert np.allclose(np.mean(data), 5.529431079676073e-22, rtol=limit, atol=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_args_none(self) -> None:
        """Apply a function that takes no extra arguments."""

        module = TimeNormalizationModule(name_in='norm',
                                         image_in_tag='images',
                                         image_out_tag='im_norm')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm')

        data = self.pipeline.get_data('im_norm')
        assert np.allclose(np.mean(data), -3.3117684144801347e-07, rtol=limit, atol=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_args_none_memory_none(self) -> None:
        """Same as above but with MEMORY=0 (process all images at once)."""

        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        module = TimeNormalizationModule(name_in='norm_none',
                                         image_in_tag='images',
                                         image_out_tag='im_norm')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm_none')

        data = self.pipeline.get_data('im_norm')
        assert np.allclose(np.mean(data), -3.3117684144801347e-07, rtol=limit, atol=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_same_port(self) -> None:
        """Use the same tag for input and output port."""

        module = LineSubtractionModule(name_in='subtract_same',
                                       image_in_tag='im_subtract',
                                       image_out_tag='im_subtract',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract_same')

        data = self.pipeline.get_data('im_subtract')
        assert np.allclose(np.mean(data), 7.318364664277155e-22, rtol=limit, atol=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_args_none_memory_none_same_port(self) -> None:
        """Same input and output tag, no extra arguments, MEMORY=0."""

        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        data = self.pipeline.get_data('images')
        assert np.allclose(np.mean(data), 1.9545313398209947e-06, rtol=limit, atol=0.)
        assert data.shape == (100, 10, 10)

        module = TimeNormalizationModule(name_in='norm_none_same',
                                         image_in_tag='images',
                                         image_out_tag='images')

        self.pipeline.add_module(module)
        self.pipeline.run_module('norm_none_same')

        data = self.pipeline.get_data('images')
        assert np.allclose(np.mean(data), -3.3117684144801347e-07, rtol=limit, atol=0.)
        assert data.shape == (100, 10, 10)

    def test_apply_function_to_images_memory_none(self) -> None:
        """Apply a function to individual images with MEMORY=0."""

        module = StarExtractionModule(name_in='extract',
                                      image_in_tag='im_subtract',
                                      image_out_tag='extract',
                                      index_out_tag=None,
                                      image_size=0.5,
                                      fwhm_star=0.1,
                                      position=(None, None, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract')

        data = self.pipeline.get_data('extract')
        assert np.allclose(np.mean(data), 1.5591859111937413e-07, rtol=limit, atol=0.)
        assert data.shape == (100, 5, 5)

    def test_multiproc_large_data(self) -> None:
        """Single-core and multi-core processing must produce identical results."""

        self.pipeline.set_attribute('config', 'MEMORY', 1000, static=True)
        self.pipeline.set_attribute('config', 'CPU', 1, static=True)

        module = LineSubtractionModule(name_in='subtract_single',
                                       image_in_tag='large_data',
                                       image_out_tag='im_sub_single',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract_single')

        self.pipeline.set_attribute('config', 'CPU', 4, static=True)

        module = LineSubtractionModule(name_in='subtract_multi',
                                       image_in_tag='large_data',
                                       image_out_tag='im_sub_multi',
                                       combine='mean',
                                       mask=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('subtract_multi')

        data_single = self.pipeline.get_data('im_sub_single')
        data_multi = self.pipeline.get_data('im_sub_multi')
        assert np.allclose(data_single, data_multi, rtol=limit, atol=0.)
        assert data_single.shape == data_multi.shape
class TestPypeline:
    """Tests for applying functions to 2D/3D datasets through the Pypeline."""

    def setup_class(self) -> None:
        """Create a seeded random HDF5 database, FITS test data, a configuration
        file, and the pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'

        # Fixed seed so the numerical assertions below are reproducible
        np.random.seed(1)

        image_3d = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        image_2d = np.random.normal(loc=0, scale=2e-4, size=(10, 10))
        science = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))
        dark = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10))

        # Context manager (instead of explicit close()) guarantees the HDF5
        # handle is released even if a dataset write fails
        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'w') as hdf_file:
            hdf_file.create_dataset('image_3d', data=image_3d)
            hdf_file.create_dataset('image_2d', data=image_2d)
            hdf_file.create_dataset('science', data=science)
            hdf_file.create_dataset('dark', data=dark)

        create_star_data(path=self.test_dir + 'images')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the test data created in setup_class."""

        remove_test_data(self.test_dir, folders=['images'])

    def test_output_port_name(self) -> None:
        """Adding a duplicate output port tag warns for both module types."""

        module = FitsReadingModule(name_in='read',
                                   input_dir=self.test_dir + 'images',
                                   image_tag='images')

        module.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            module.add_output_port('test')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Tag \'test\' of ReadingModule \'read\' is already ' \
                                             'used.'

        module = BadPixelSigmaFilterModule(name_in='badpixel',
                                           image_in_tag='images')

        module.add_output_port('test')

        with pytest.warns(UserWarning) as warning:
            module.add_output_port('test')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Tag \'test\' of ProcessingModule \'badpixel\' is ' \
                                             'already used.'

        self.pipeline.m_data_storage.close_connection()

        # Point the module at a fresh database file so a new port can be added
        module._m_data_base = self.test_dir + 'database.hdf5'
        module.add_output_port('new')

    def test_apply_function_to_images_3d(self) -> None:
        """Apply a shape-changing function to a 3D dataset with MEMORY=1."""

        self.pipeline.set_attribute('config', 'MEMORY', 1, static=True)

        module = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove1',
                                   image_in_tag='image_3d',
                                   image_out_tag='remove_3d')

        self.pipeline.add_module(module)
        self.pipeline.run_module('remove1')

        data = self.pipeline.get_data('image_3d')
        assert np.allclose(np.mean(data), 1.0141852764605783e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        data = self.pipeline.get_data('remove_3d')
        # One image line removed: last axis shrinks from 10 to 9
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_2d(self) -> None:
        """Apply the same shape-changing function to a single 2D image."""

        module = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove2',
                                   image_in_tag='image_2d',
                                   image_out_tag='remove_2d')

        self.pipeline.add_module(module)
        self.pipeline.run_module('remove2')

        data = self.pipeline.get_data('image_2d')
        assert np.allclose(np.mean(data), 1.2869483197883442e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 10)

        data = self.pipeline.get_data('remove_2d')
        assert np.allclose(np.mean(data), 1.3957075246029751e-05, rtol=limit, atol=0.)
        assert data.shape == (10, 9)

    def test_apply_function_to_images_same_port(self) -> None:
        """Use the same tag for input and output, with and without MEMORY, and
        check that a shape-changing function raises an error for equal tags."""

        module = DarkCalibrationModule(name_in='dark1',
                                       image_in_tag='science',
                                       dark_in_tag='dark',
                                       image_out_tag='science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('dark1')

        data = self.pipeline.get_data('science')
        assert np.allclose(np.mean(data), -3.190113568690675e-06, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        self.pipeline.set_attribute('config', 'MEMORY', 0, static=True)

        module = DarkCalibrationModule(name_in='dark2',
                                       image_in_tag='science',
                                       dark_in_tag='dark',
                                       image_out_tag='science')

        self.pipeline.add_module(module)
        self.pipeline.run_module('dark2')

        data = self.pipeline.get_data('science')
        assert np.allclose(np.mean(data), -1.026073475228737e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 10)

        module = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove3',
                                   image_in_tag='remove_3d',
                                   image_out_tag='remove_3d')

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('remove3')

        assert str(error.value) == 'Input and output port have the same tag while the input ' \
                                   'function is changing the image shape. This is only ' \
                                   'possible with MEMORY=None.'

    def test_apply_function_to_images_memory_none(self) -> None:
        """A shape-changing function with distinct tags works when MEMORY is unset."""

        module = RemoveLinesModule(lines=(1, 0, 0, 0),
                                   name_in='remove4',
                                   image_in_tag='image_3d',
                                   image_out_tag='remove_3d_none')

        self.pipeline.add_module(module)
        self.pipeline.run_module('remove4')

        data = self.pipeline.get_data('remove_3d_none')
        assert np.allclose(np.mean(data), 1.1477029889801025e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 10, 9)

    def test_apply_function_to_images_3d_args(self) -> None:
        """Apply a function with extra arguments to a 3D dataset and check that
        the PIXSCALE attribute is updated by the scaling."""

        self.pipeline.set_attribute('config', 'MEMORY', 1, static=True)
        self.pipeline.set_attribute('image_3d', 'PIXSCALE', 0.1, static=True)

        module = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                   pixscale=True,
                                   name_in='scale1',
                                   image_in_tag='image_3d',
                                   image_out_tag='scale_3d')

        self.pipeline.add_module(module)
        self.pipeline.run_module('scale1')

        data = self.pipeline.get_data('scale_3d')
        assert np.allclose(np.mean(data), 7.042953308754017e-05, rtol=limit, atol=0.)
        assert data.shape == (4, 12, 12)

        # Upscaling by 1.2 reduces the pixel scale: 0.1 / 1.2
        attribute = self.pipeline.get_attribute('scale_3d', 'PIXSCALE', static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)

    def test_apply_function_to_images_2d_args(self) -> None:
        """Same as the 3D case but for a single 2D image."""

        self.pipeline.set_attribute('image_2d', 'PIXSCALE', 0.1, static=True)

        module = ScaleImagesModule(scaling=(1.2, 1.2, 10.),
                                   pixscale=True,
                                   name_in='scale2',
                                   image_in_tag='image_2d',
                                   image_out_tag='scale_2d')

        self.pipeline.add_module(module)
        self.pipeline.run_module('scale2')

        data = self.pipeline.get_data('scale_2d')
        assert np.allclose(np.mean(data), 8.937141109641279e-05, rtol=limit, atol=0.)
        assert data.shape == (12, 12)

        attribute = self.pipeline.get_attribute('scale_2d', 'PIXSCALE', static=True)
        assert np.allclose(attribute, 0.08333333333333334, rtol=limit, atol=0.)
class TestFluxPosition:
    """Tests for photometry, fake planet injection, PSF subtraction, detection
    statistics, and flux/position retrieval."""

    def setup_class(self) -> None:
        # Relative tolerance used with pytest.approx throughout this class
        self.limit = 1e-10

        self.test_dir = os.path.dirname(__file__) + '/'

        create_fake_data(self.test_dir + 'adi')
        create_star_data(self.test_dir + 'psf', npix=21, pos_star=10.)
        create_star_data(self.test_dir + 'ref', npix=21, pos_star=10.)
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(self.test_dir, folders=['adi', 'psf', 'ref'])

    def test_read_data(self) -> None:
        """Read the ADI, PSF, and reference data and set the parallactic angles."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='adi',
                                   input_dir=self.test_dir + 'adi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('adi')
        assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        self.pipeline.set_attribute('adi', 'PARANG', np.linspace(0., 180., 10), static=False)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='psf',
                                   input_dir=self.test_dir + 'psf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('psf')
        assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        # NOTE(review): this stores tag 'ref' but reads from the 'psf' folder —
        # presumably input_dir should be self.test_dir + 'ref'. Both folders
        # were created with identical parameters, so the assertions pass either
        # way; confirm the intent before changing.
        module = FitsReadingModule(name_in='read3',
                                   image_tag='ref',
                                   input_dir=self.test_dir + 'psf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read3')

        data = self.pipeline.get_data('ref')
        assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_aperture_photometry(self) -> None:
        """Aperture photometry with 1 and 4 CPUs must give identical results."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = AperturePhotometryModule(name_in='photometry1',
                                          image_in_tag='psf',
                                          phot_out_tag='photometry1',
                                          radius=0.1,
                                          position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry1')

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = AperturePhotometryModule(name_in='photometry2',
                                          image_in_tag='psf',
                                          phot_out_tag='photometry2',
                                          radius=0.1,
                                          position=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry2')

        data = self.pipeline.get_data('photometry1')
        assert np.sum(data) == pytest.approx(100.80648929590365, rel=self.limit, abs=0.)
        assert data.shape == (10, 1)

        data_multi = self.pipeline.get_data('photometry2')
        assert data.shape == data_multi.shape
        assert data == pytest.approx(data_multi, rel=self.limit, abs=0.)

    def test_aperture_photometry_position(self) -> None:
        """Aperture photometry at an explicit position."""

        module = AperturePhotometryModule(name_in='photometry3',
                                          image_in_tag='psf',
                                          phot_out_tag='photometry3',
                                          radius=0.1,
                                          position=(10., 10.))

        self.pipeline.add_module(module)
        self.pipeline.run_module('photometry3')

        data = self.pipeline.get_data('photometry3')
        assert np.sum(data) == pytest.approx(100.80648929590365, rel=self.limit, abs=0.)
        assert data.shape == (10, 1)

    def test_fake_planet(self) -> None:
        """Inject an artificial planet into the ADI data."""

        module = FakePlanetModule(position=(0.2, 180.),
                                  magnitude=2.5,
                                  psf_scaling=1.,
                                  interpolation='spline',
                                  name_in='fake',
                                  image_in_tag='adi',
                                  psf_in_tag='psf',
                                  image_out_tag='fake')

        self.pipeline.add_module(module)
        self.pipeline.run_module('fake')

        data = self.pipeline.get_data('fake')
        assert np.sum(data) == pytest.approx(21.51956021269913, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

    def test_psf_subtraction(self) -> None:
        """PCA PSF subtraction of the data with the injected planet."""

        module = PcaPsfSubtractionModule(pca_numbers=[1, ],
                                         name_in='pca',
                                         images_in_tag='fake',
                                         reference_in_tag='fake',
                                         res_mean_tag='res_mean',
                                         extra_rot=0.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('pca')

        data = self.pipeline.get_data('res_mean')
        assert np.sum(data) == pytest.approx(0.014757351752469366, rel=self.limit, abs=0.)
        assert data.shape == (1, 21, 21)

    def test_false_positive(self) -> None:
        """SNR and false positive fraction at a fixed position."""

        module = FalsePositiveModule(position=(10., 2.),
                                     aperture=0.06,
                                     ignore=True,
                                     name_in='false1',
                                     image_in_tag='res_mean',
                                     snr_out_tag='snr_fpf1',
                                     optimize=False)

        self.pipeline.add_module(module)
        self.pipeline.run_module('false1')

        data = self.pipeline.get_data('snr_fpf1')
        assert data[0, 1] == pytest.approx(2., rel=self.limit, abs=0.)
        assert data[0, 2] == pytest.approx(0.216, rel=self.limit, abs=0.)
        assert data[0, 3] == pytest.approx(180., rel=self.limit, abs=0.)
        assert data[0, 4] == pytest.approx(23.555448981008507, rel=self.limit, abs=0.)
        assert data[0, 5] == pytest.approx(3.1561982060476726e-08, rel=self.limit, abs=0.)
        assert data.shape == (1, 6)

    def test_false_positive_optimize(self) -> None:
        """SNR and FPF with position optimization enabled."""

        module = FalsePositiveModule(position=(10., 2.),
                                     aperture=0.06,
                                     ignore=True,
                                     name_in='false2',
                                     image_in_tag='res_mean',
                                     snr_out_tag='snr_fpf2',
                                     optimize=True,
                                     offset=0.1,
                                     tolerance=0.01)

        self.pipeline.add_module(module)
        self.pipeline.run_module('false2')

        data = self.pipeline.get_data('snr_fpf2')
        assert data[0, 1] == pytest.approx(2.0681640624999993, rel=self.limit, abs=0.)
        assert data[0, 2] == pytest.approx(0.21416845852767494, rel=self.limit, abs=0.)
        assert data[0, 3] == pytest.approx(179.47800221910444, rel=self.limit, abs=0.)
        assert data[0, 4] == pytest.approx(24.254455766076823, rel=self.limit, abs=0.)
        assert data[0, 5] == pytest.approx(2.5776271254831863e-08, rel=self.limit, abs=0.)
        assert data.shape == (1, 6)

    def test_simplex_minimization_hessian(self) -> None:
        """Retrieve the planet flux and position with the Hessian merit function."""

        module = SimplexMinimizationModule(name_in='simplex1',
                                           image_in_tag='fake',
                                           psf_in_tag='psf',
                                           res_out_tag='simplex_res',
                                           flux_position_tag='flux_position',
                                           position=(10., 3.),
                                           magnitude=2.5,
                                           psf_scaling=-1.,
                                           merit='hessian',
                                           aperture=0.06,
                                           sigma=0.,
                                           tolerance=0.1,
                                           pca_number=1,
                                           cent_size=0.06,
                                           edge_size=None,
                                           extra_rot=0.,
                                           reference_in_tag=None,
                                           residuals='median',
                                           offset=1.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('simplex1')

        data = self.pipeline.get_data('simplex_res')
        assert np.sum(data) == pytest.approx(0.07079158286664607, rel=self.limit, abs=0.)
        assert data.shape == (25, 21, 21)

        data = self.pipeline.get_data('flux_position')
        assert data[24, 0] == pytest.approx(9.933213305898484, rel=self.limit, abs=0.)
        assert data[24, 1] == pytest.approx(2.637268518518516, rel=self.limit, abs=0.)
        assert data[24, 2] == pytest.approx(0.198801928351391, rel=self.limit, abs=0.)
        assert data[24, 3] == pytest.approx(179.48028924294857, rel=self.limit, abs=0.)
        assert data[24, 4] == pytest.approx(2.4782450274348378, rel=self.limit, abs=0.)
        assert data.shape == (25, 6)

    def test_simplex_minimization_reference(self) -> None:
        """Same retrieval but with a reference dataset and Poisson merit."""

        module = SimplexMinimizationModule(name_in='simplex2',
                                           image_in_tag='fake',
                                           psf_in_tag='psf',
                                           res_out_tag='simplex_res_ref',
                                           flux_position_tag='flux_position_ref',
                                           position=(10., 3.),
                                           magnitude=2.5,
                                           psf_scaling=-1.,
                                           merit='poisson',
                                           aperture=0.06,
                                           sigma=0.,
                                           tolerance=0.1,
                                           pca_number=1,
                                           cent_size=0.06,
                                           edge_size=None,
                                           extra_rot=0.,
                                           reference_in_tag='ref',
                                           residuals='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('simplex2')

        data = self.pipeline.get_data('simplex_res_ref')
        assert np.sum(data) == pytest.approx(9.914746160040783, rel=self.limit, abs=0.)
        assert data.shape == (28, 21, 21)

        data = self.pipeline.get_data('flux_position_ref')
        assert data[27, 0] == pytest.approx(10.049019964116436, rel=self.limit, abs=0.)
        assert data[27, 1] == pytest.approx(2.6444836362361936, rel=self.limit, abs=0.)
        assert data[27, 2] == pytest.approx(0.19860335205689572, rel=self.limit, abs=0.)
        assert data[27, 3] == pytest.approx(180.38183525629643, rel=self.limit, abs=0.)
        assert data[27, 4] == pytest.approx(2.5496922175196, rel=self.limit, abs=0.)
        assert data.shape == (28, 6)

    def test_mcmc_sampling(self) -> None:
        """Stack the PSF images and run a short MCMC flux/position sampling."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = DerotateAndStackModule(name_in='stack',
                                        image_in_tag='psf',
                                        image_out_tag='psf_stack',
                                        derotate=False,
                                        stack='mean')

        self.pipeline.add_module(module)
        self.pipeline.run_module('stack')

        data = self.pipeline.get_data('psf_stack')
        assert np.sum(data) == pytest.approx(10.843655133957288, rel=self.limit, abs=0.)
        assert data.shape == (1, 21, 21)

        # Tiny chain (6 walkers, 5 steps): only checks that the module runs
        module = MCMCsamplingModule(name_in='mcmc',
                                    image_in_tag='adi',
                                    psf_in_tag='psf_stack',
                                    chain_out_tag='mcmc',
                                    param=(0.15, 0., 1.),
                                    bounds=((0.1, 0.2), (-2., 2.), (-1., 2.)),
                                    nwalkers=6,
                                    nsteps=5,
                                    psf_scaling=-1.,
                                    pca_number=1,
                                    aperture=(10, 16, 0.06),
                                    mask=None,
                                    extra_rot=0.,
                                    merit='gaussian',
                                    residuals='median',
                                    sigma=(1e-3, 1e-1, 1e-2))

        self.pipeline.add_module(module)
        self.pipeline.run_module('mcmc')

    def test_systematic_error(self) -> None:
        """Estimate the systematic offsets of the retrieved flux and position."""

        module = SystematicErrorModule(name_in='error',
                                       image_in_tag='adi',
                                       psf_in_tag='psf',
                                       offset_out_tag='offset',
                                       position=(0.162, 0.),
                                       magnitude=5.,
                                       angles=(0., 180., 2),
                                       psf_scaling=1.,
                                       merit='gaussian',
                                       aperture=0.06,
                                       tolerance=0.1,
                                       pca_number=1,
                                       mask=(None, None),
                                       extra_rot=0.,
                                       residuals='median',
                                       offset=1.)

        self.pipeline.add_module(module)
        self.pipeline.run_module('error')

        data = self.pipeline.get_data('offset')
        assert data[0, 0] == pytest.approx(-0.0028749671933526733, rel=self.limit, abs=0.)
        assert data[0, 1] == pytest.approx(0.2786088210998514, rel=self.limit, abs=0.)
        assert data[0, 2] == pytest.approx(-0.02916297162565762, rel=self.limit, abs=0.)
        assert data[0, 3] == pytest.approx(-0.02969350583704866, rel=self.limit, abs=0.)
        assert data[0, 4] == pytest.approx(-0.10640807184499579, rel=self.limit, abs=0.)
        assert data.shape == (2, 5)
class TestExtract:
    """Tests for extracting a star or a binary companion from the images."""

    def setup_class(self) -> None:
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'star',
                         npix_x=51,
                         npix_y=51,
                         x0=[10., 10., 10., 10.],
                         y0=[10., 10., 10., 10.])

        create_fake(path=self.test_dir + 'binary',
                    ndit=[20, 20, 20, 20],
                    nframes=[20, 20, 20, 20],
                    exp_no=[1, 2, 3, 4],
                    npix=(101, 101),
                    fwhm=3.,
                    x0=[50, 50, 50, 50],
                    y0=[50, 50, 50, 50],
                    angles=[[0., 25.], [25., 50.], [50., 75.], [75., 100.]],
                    sep=20.,
                    contrast=1.)

        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        remove_test_data(path=self.test_dir, folders=['star', 'binary'])

    def test_read_data(self) -> None:
        """Read the star and binary test data."""

        module = FitsReadingModule(name_in='read1',
                                   image_tag='star',
                                   input_dir=self.test_dir + 'star',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('star')
        assert np.allclose(data[0, 10, 10], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00038538535294683216, rtol=limit, atol=0.)
        assert data.shape == (40, 51, 51)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='binary',
                                   input_dir=self.test_dir + 'binary',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('binary')
        assert np.allclose(data[0, 50, 50], 0.0986064357966972, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00019636787665654158, rtol=limit, atol=0.)
        assert data.shape == (80, 101, 101)

    def test_angle_interpolation(self) -> None:
        """Interpolate the parallactic angles and check sign inversion."""

        module = AngleInterpolationModule(name_in='angle', data_tag='binary')

        self.pipeline.add_module(module)
        self.pipeline.run_module('angle')

        data = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        assert data[5] == 6.578947368421053
        assert np.allclose(np.mean(data), 50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

        # Flip the sign of the angles and check the stored attribute
        parang = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        self.pipeline.set_attribute('binary', 'PARANG', -1. * parang, static=False)

        data = self.pipeline.get_attribute('binary', 'PARANG', static=False)
        assert data[5] == -6.578947368421053
        assert np.allclose(np.mean(data), -50.0, rtol=limit, atol=0.)
        assert data.shape == (80, )

    def test_extract_position_none(self) -> None:
        """Extract the star without specifying a position."""

        module = StarExtractionModule(name_in='extract1',
                                      image_in_tag='star',
                                      image_out_tag='extract1',
                                      index_out_tag='index',
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract1')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'The new dataset that is stored under the tag name ' \
                                             '\'index\' is empty.'

        data = self.pipeline.get_data('extract1')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract1', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_center_none(self) -> None:
        """Extract the star with a search radius but no center position."""

        module = StarExtractionModule(name_in='extract2',
                                      image_in_tag='star',
                                      image_out_tag='extract2',
                                      index_out_tag='index',
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=(None, None, 1.))

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract2')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'The new dataset that is stored under the tag name ' \
                                             '\'index\' is empty.'

        data = self.pipeline.get_data('extract2')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract2', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_position(self) -> None:
        """Extract the star at an explicit position."""

        module = StarExtractionModule(name_in='extract7',
                                      image_in_tag='star',
                                      image_out_tag='extract7',
                                      index_out_tag=None,
                                      image_size=0.4,
                                      fwhm_star=0.1,
                                      position=(10, 10, 0.1))

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract7')

        data = self.pipeline.get_data('extract7')
        assert np.allclose(data[0, 7, 7], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.004444871536643222, rtol=limit, atol=0.)
        assert data.shape == (40, 15, 15)

        attr = self.pipeline.get_attribute('extract7', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 10

    def test_extract_too_large(self) -> None:
        """A crop size larger than fits around the star warns for every image
        and falls back to the image center."""

        module = StarExtractionModule(name_in='extract3',
                                      image_in_tag='star',
                                      image_out_tag='extract3',
                                      index_out_tag=None,
                                      image_size=0.8,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract3')

        assert len(warning) == 40

        for i, item in enumerate(warning):
            assert item.message.args[0] == f'Chosen image size is too large to crop the image ' \
                                           f'around the brightest pixel (image index = {i}, ' \
                                           f'pixel [x, y] = [10, 10]). Using the center of ' \
                                           f'the image instead.'

        data = self.pipeline.get_data('extract3')
        assert np.allclose(data[0, 0, 0], 0.09834884212021108, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0004499242959139202, rtol=limit, atol=0.)
        assert data.shape == (40, 31, 31)

        attr = self.pipeline.get_attribute('extract3', 'STAR_POSITION', static=False)
        assert attr[10, 0] == attr[10, 1] == 25

    def test_star_extract_cpu(self) -> None:
        """With multiprocessing (CPU=4) the too-large warning is emitted once."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 4

        module = StarExtractionModule(name_in='extract4',
                                      image_in_tag='star',
                                      image_out_tag='extract4',
                                      index_out_tag='index',
                                      image_size=0.8,
                                      fwhm_star=0.1,
                                      position=None)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('extract4')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Chosen image size is too large to crop the image ' \
                                             'around the brightest pixel. Using the center of ' \
                                             'the image instead.'

    def test_extract_binary(self) -> None:
        """Extract the rotating companion around the central star."""

        with h5py.File(self.test_dir + 'PynPoint_database.hdf5', 'a') as hdf_file:
            hdf_file['config'].attrs['CPU'] = 1

        module = ExtractBinaryModule(pos_center=(50., 50.),
                                     pos_binary=(50., 70.),
                                     name_in='extract5',
                                     image_in_tag='binary',
                                     image_out_tag='extract5',
                                     image_size=0.5,
                                     search_size=0.2,
                                     filter_size=None)

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract5')

        data = self.pipeline.get_data('extract5')
        assert np.allclose(data[0, 9, 9], 0.09774483733119443, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0027700881940171283, rtol=limit, atol=0.)
        assert data.shape == (80, 19, 19)

    def test_extract_binary_filter(self) -> None:
        """Same extraction but with a Gaussian smoothing filter applied first."""

        module = ExtractBinaryModule(pos_center=(50., 50.),
                                     pos_binary=(50., 70.),
                                     name_in='extract6',
                                     image_in_tag='binary',
                                     image_out_tag='extract6',
                                     image_size=0.5,
                                     search_size=0.2,
                                     filter_size=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('extract6')

        data = self.pipeline.get_data('extract6')
        assert np.allclose(data[0, 9, 9], 0.09774483733119443, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.002770040591615301, rtol=limit, atol=0.)
        assert data.shape == (80, 19, 19)
class TestFitsWritingModule:
    """Tests for writing datasets from the database to FITS files."""

    def setup_class(self) -> None:
        """Create test data, a configuration file, and the pipeline."""

        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(path=self.test_dir + 'fits')
        create_config(self.test_dir + 'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:
        """Remove the test data and the FITS file written by the tests."""

        remove_test_data(self.test_dir, folders=['fits'], files=['test.fits'])

    def test_fits_reading(self) -> None:
        """Read the FITS files that the writing tests operate on."""

        module = FitsReadingModule(name_in='read',
                                   input_dir=self.test_dir + 'fits',
                                   image_tag='images',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('images')
        assert np.allclose(data[0, 50, 50], 0.09798413502193704, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)
        assert data.shape == (40, 100, 100)

    def test_fits_writing(self) -> None:
        """Write the complete dataset to a FITS file."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write1',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write1')

    def test_filename_string(self) -> None:
        """A non-string file name should raise a ValueError."""

        with pytest.raises(ValueError) as error:
            FitsWritingModule(file_name=0.,
                              name_in='write2',
                              output_dir=None,
                              data_tag='images',
                              data_range=None,
                              overwrite=True)

        assert str(error.value) == 'Output \'file_name\' needs to be a string.'

    def test_filename_extension(self) -> None:
        """A file name without the FITS extension should raise a ValueError."""

        with pytest.raises(ValueError) as error:
            FitsWritingModule(file_name='test.dat',
                              name_in='write3',
                              output_dir=None,
                              data_tag='images',
                              data_range=None,
                              overwrite=True)

        assert str(error.value) == 'Output \'file_name\' requires the FITS extension.'

    def test_data_range(self) -> None:
        """Write only a subset of the images with the data_range parameter."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write4',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=(0, 10),
                                   overwrite=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('write4')

    def test_not_overwritten(self) -> None:
        """With overwrite=False an existing file should trigger a warning."""

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write5',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=False)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('write5')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Filename already present. Use overwrite=True ' \
                                             'to overwrite an existing FITS file.'

    def test_attribute_length(self) -> None:
        """Header values that are too long for FITS are truncated with a warning."""

        text = 'long_text_long_text_long_text_long_text_long_text_long_text_long_text_long_text'

        self.pipeline.set_attribute('images', 'short', 'value', static=True)
        self.pipeline.set_attribute('images', 'longer_than_eight1', 'value', static=True)
        self.pipeline.set_attribute('images', 'longer_than_eight2', text, static=True)

        module = FitsWritingModule(file_name='test.fits',
                                   name_in='write6',
                                   output_dir=None,
                                   data_tag='images',
                                   data_range=None,
                                   overwrite=True)

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('write6')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'Key \'hierarch longer_than_eight2\' with value ' \
                                             '\'long_text_long_text_long_text_long_text_long_' \
                                             'text_long_text_long_text_long_text\' is too ' \
                                             'long for the FITS format. To avoid an error, ' \
                                             'the value was truncated to \'long_text_long_text' \
                                             '_long_text_long_text_long_tex\'.'