def test_add_wcs_default(data_file, tmp_path):
    """Handle when no pointing exists and the default is used."""
    expected_name = 'add_wcs_default.fits'

    # Attempt the WCS update. A ValueError is the acceptable outcome here:
    # no pointing telemetry exists and the default gets applied.
    try:
        stp.add_wcs(data_file, siaf_path=siaf_path, tolerance=0, allow_default=True)
    except ValueError:
        pass  # This is what we want for the test.
    except Exception as e:
        pytest.skip('Live ENGDB service is not accessible.'
                    '\nException={}'.format(e))

    # Compare the updated model against the stored truth file.
    with datamodels.Level1bModel(data_file) as result:
        # Keep a copy in tmp_path so truth files can be refreshed post-test.
        result.save(tmp_path / expected_name)

        with datamodels.open(DATA_PATH / expected_name) as truth:
            for meta in METAS_EQUALITY:
                assert result[meta] == truth[meta], f'{meta} has changed'
            for meta in METAS_ISCLOSE:
                assert np.isclose(result[meta], truth[meta]), f'{meta} has changed'

            assert word_precision_check(result.meta.wcsinfo.s_region,
                                        truth.meta.wcsinfo.s_region)
def test_add_wcs_method_full_nosiafdb(eng_db_ngas, data_file, tmp_path):
    """Test using the database"""
    # Only run if `pysiaf` is installed.
    pytest.importorskip('pysiaf')

    expected_name = 'add_wcs_method_full_nosiafdb.fits'

    # Calculate
    stp.add_wcs(data_file, method=stp.Methods.OPS_TR_202111, engdb_url='http://localhost')

    # Tests
    with datamodels.Level1bModel(data_file) as model:
        # Save for post-test comparison and update
        model.save(tmp_path / expected_name)

        with datamodels.open(DATA_PATH / expected_name) as expected:
            for meta in METAS_EQUALITY:
                # Name the offending key in the failure message, matching
                # the style of the other add_wcs tests in this suite.
                assert model[meta] == expected[meta], f'{meta} has changed'
            for meta in METAS_ISCLOSE:
                assert np.isclose(model[meta], expected[meta]), f'{meta} has changed'

            assert word_precision_check(model.meta.wcsinfo.s_region,
                                        expected.meta.wcsinfo.s_region)
def test_add_wcs_method_full_siafdb(eng_db_ngas, data_file, tmp_path):
    """Test using the database and a specified siaf db"""
    expected_name = 'add_wcs_method_full_siafdb.fits'

    # Calculate
    stp.add_wcs(data_file, siaf_path=siaf_path, method=stp.Methods.OPS_TR_202111, engdb_url='http://localhost')

    # Test
    with datamodels.Level1bModel(data_file) as model:
        # Save for post-test comparison and update
        model.save(tmp_path / expected_name)

        with datamodels.open(DATA_PATH / expected_name) as expected:
            for meta in METAS_EQUALITY:
                # Name the offending key in the failure message, matching
                # the style of the other add_wcs tests in this suite.
                if isinstance(model[meta], str):
                    assert model[meta] == expected[meta], f'{meta} has changed'
                else:
                    assert np.isclose(model[meta], expected[meta], atol=1e-13), f'{meta} has changed'
            for meta in METAS_ISCLOSE:
                assert np.isclose(model[meta], expected[meta]), f'{meta} has changed'

            assert word_precision_check(model.meta.wcsinfo.s_region,
                                        expected.meta.wcsinfo.s_region)
def test_add_wcs_method_gscmd(eng_db_ngas, data_file, tmp_path):
    """Test using the database and the original, pre-JSOCINT-555 algorithms"""
    expected_name = 'add_wcs_method_gscmd.fits'

    # Run the pointing update with the legacy guide-star-command method.
    stp.add_wcs(data_file, siaf_path=siaf_path,
                method=stp.Methods.GSCMD_J3PAGS, engdb_url='http://localhost')

    # Compare the result against the stored truth file.
    with datamodels.Level1bModel(data_file) as result:
        # Keep a copy in tmp_path so truth files can be refreshed post-test.
        result.save(tmp_path / expected_name)

        with datamodels.open(DATA_PATH / expected_name) as truth:
            for meta in METAS_EQUALITY:
                if isinstance(result[meta], str):
                    assert result[meta] == truth[meta], f'{meta} has changed'
                else:
                    assert np.isclose(result[meta], truth[meta], atol=1e-13), f'{meta} has changed'
            for meta in METAS_ISCLOSE:
                assert np.isclose(result[meta], truth[meta]), f'{meta} has changed'

            assert word_precision_check(result.meta.wcsinfo.s_region,
                                        truth.meta.wcsinfo.s_region)
def data_file_nosiaf():
    """Yield the path to an on-disk Level1b file whose aperture is "UNKNOWN"."""
    dm = datamodels.Level1bModel()
    dm.meta.exposure.start_time = STARTTIME.mjd
    dm.meta.exposure.end_time = ENDTIME.mjd
    dm.meta.target.ra = TARG_RA
    dm.meta.target.dec = TARG_DEC
    dm.meta.aperture.name = "UNKNOWN"
    dm.meta.observation.date = '1/1/2017'

    # The temporary folder lives for the duration of the dependent test.
    with TemporaryDirectory() as folder:
        fits_path = os.path.join(folder, 'fits_nosiaf.fits')
        dm.save(fits_path)
        dm.close()
        yield fits_path
def test_tsgrism_siaf_values(eng_db_ngas, data_file_nosiaf):
    """Test that FITS WCS default values were set."""
    with datamodels.Level1bModel(data_file_nosiaf) as dm:
        # Configure the model as a NIRCam time-series grism exposure.
        dm.meta.exposure.start_time = STARTTIME.mjd
        dm.meta.exposure.end_time = ENDTIME.mjd
        dm.meta.aperture.name = "NRCA5_GRISM256_F444W"
        dm.meta.observation.date = '1/1/2017'
        dm.meta.exposure.type = "NRC_TSGRISM"
        dm.meta.visit.tsovisit = True

        stp.update_wcs(dm, siaf_path=siaf_db)

        # Reference-pixel values pulled from the SIAF database.
        assert dm.meta.wcsinfo.siaf_xref_sci == 887
        assert dm.meta.wcsinfo.siaf_yref_sci == 35
def test_default_siaf_values(eng_db_ngas, data_file_nosiaf):
    """Test that FITS WCS default values were set."""
    with datamodels.Level1bModel(data_file_nosiaf) as dm:
        meta = dm.meta
        meta.exposure.start_time = STARTTIME.mjd
        meta.exposure.end_time = ENDTIME.mjd
        meta.target.ra = TARG_RA
        meta.target.dec = TARG_DEC
        meta.aperture.name = "MIRIM_TAFULL"
        meta.observation.date = '1/1/2017'
        meta.exposure.type = "MIR_IMAGE"

        stp.update_wcs(dm, siaf_path=siaf_db, allow_default=False)

        assert meta.wcsinfo.crpix1 == 0
        assert meta.wcsinfo.crpix2 == 0
        assert meta.wcsinfo.cdelt1 == 1
        assert meta.wcsinfo.cdelt2 == 1
def test_add_wcs_fsmcorr_v1(data_file):
    """Test with default value using FSM original correction"""
    # A ValueError is acceptable: no telemetry, default pointing applied.
    try:
        stp.add_wcs(data_file, fsmcorr_version='v1', siaf_path=siaf_db, tolerance=0, allow_default=True)
    except ValueError:
        pass  # This is what we want for the test.
    except Exception as e:
        pytest.skip('Live ENGDB service is not accessible.'
                    '\nException={}'.format(e))

    with datamodels.Level1bModel(data_file) as model:
        # Alias the meta nodes to keep the assertions compact.
        pointing = model.meta.pointing
        wcsinfo = model.meta.wcsinfo

        assert pointing.ra_v1 == TARG_RA
        assert pointing.dec_v1 == TARG_DEC
        assert pointing.pa_v3 == 0.

        assert wcsinfo.wcsaxes == 2
        assert wcsinfo.crpix1 == 693.5
        assert wcsinfo.crpix2 == 512.5
        assert wcsinfo.crval1 == TARG_RA
        assert wcsinfo.crval2 == TARG_DEC
        assert wcsinfo.ctype1 == "RA---TAN"
        assert wcsinfo.ctype2 == "DEC--TAN"
        assert wcsinfo.cunit1 == 'deg'
        assert wcsinfo.cunit2 == 'deg'
        assert np.isclose(wcsinfo.cdelt1, 3.0555555e-5)
        assert np.isclose(wcsinfo.cdelt2, 3.0555555e-5)
        assert np.isclose(wcsinfo.pc1_1, -0.7558009243361943)
        assert np.isclose(wcsinfo.pc1_2, 0.654801468211972)
        assert np.isclose(wcsinfo.pc2_1, 0.654801468211972)
        assert np.isclose(wcsinfo.pc2_2, 0.7558009243361943)
        assert wcsinfo.v2_ref == 200.0
        assert wcsinfo.v3_ref == -350.0
        assert wcsinfo.vparity == -1
        assert wcsinfo.v3yangle == 42.0
        assert wcsinfo.ra_ref == TARG_RA
        assert wcsinfo.dec_ref == TARG_DEC
        assert np.isclose(wcsinfo.roll_ref, 358.9045979379)

        assert word_precision_check(
            wcsinfo.s_region,
            ('POLYGON ICRS'
             ' 345.11054995209815 -87.02586884935684'
             ' 344.6537904121288 -87.00498014679253'
             ' 345.04569816117015 -86.98138111042982'
             ' 345.50498899320183 -87.00187988107017')
        )
def data_file(tmp_path):
    """Yield the path to an on-disk MIRI imaging Level1b file."""
    dm = datamodels.Level1bModel()
    dm.meta.exposure.start_time = STARTTIME.mjd
    dm.meta.exposure.end_time = ENDTIME.mjd
    dm.meta.target.ra = TARG_RA
    dm.meta.target.dec = TARG_DEC
    # Guide star placed slightly offset from the target.
    dm.meta.guidestar.gs_ra = TARG_RA + 0.0001
    dm.meta.guidestar.gs_dec = TARG_DEC + 0.0001
    dm.meta.aperture.name = "MIRIM_FULL"
    dm.meta.observation.date = '2017-01-01'
    dm.meta.exposure.type = "MIR_IMAGE"
    dm.meta.ephemeris.velocity_x = -25.021
    dm.meta.ephemeris.velocity_y = -16.507
    dm.meta.ephemeris.velocity_z = -7.187

    saved_path = tmp_path / 'file.fits'
    dm.save(saved_path)
    dm.close()

    yield saved_path
def test_default_siaf_values(eng_db_ngas, data_file_nosiaf):
    """Test that FITS WCS default values were set."""
    with datamodels.Level1bModel(data_file_nosiaf) as dm:
        meta = dm.meta
        meta.exposure.start_time = STARTTIME.mjd
        meta.exposure.end_time = ENDTIME.mjd
        meta.target.ra = TARG_RA
        meta.target.dec = TARG_DEC
        meta.aperture.name = "MIRIM_TAFULL"
        meta.observation.date = '2017-01-01'
        meta.exposure.type = "MIR_IMAGE"

        stp.update_wcs(dm, siaf_path=siaf_path, allow_default=False,
                       engdb_url='http://localhost')

        assert meta.wcsinfo.crpix1 == 24.5
        assert meta.wcsinfo.crpix2 == 24.5
        assert meta.wcsinfo.cdelt1 == 3.067124166666667e-05
        assert meta.wcsinfo.cdelt2 == 3.090061944444444e-05
def data_file_fromsim(tmp_path):
    """Create data using times that were executed during a simulation
    using the OTB Simulator.
    """
    dm = datamodels.Level1bModel()
    # Exposure window taken from the simulation run.
    dm.meta.exposure.start_time = Time('2022-02-02T22:24:58.942').mjd
    dm.meta.exposure.end_time = Time('2022-02-02T22:26:24.836').mjd
    dm.meta.target.ra = TARG_RA
    dm.meta.target.dec = TARG_DEC
    # Guide star placed slightly offset from the target.
    dm.meta.guidestar.gs_ra = TARG_RA + 0.0001
    dm.meta.guidestar.gs_dec = TARG_DEC + 0.0001
    dm.meta.guidestar.gs_pcs_mode = 'COARSE'
    dm.meta.aperture.name = "MIRIM_FULL"
    dm.meta.observation.date = '2017-01-01'
    dm.meta.exposure.type = "MIR_IMAGE"
    dm.meta.ephemeris.velocity_x_bary = -25.021
    dm.meta.ephemeris.velocity_y_bary = -16.507
    dm.meta.ephemeris.velocity_z_bary = -7.187

    saved_path = tmp_path / 'file_fromsim.fits'
    dm.save(saved_path)
    dm.close()

    yield saved_path
def test_add_wcs_with_db(eng_db_ngas, data_file, tmp_path):
    """Test using the database"""
    expected_name = 'add_wcs_with_db.fits'

    stp.add_wcs(data_file, siaf_path=siaf_path, engdb_url='http://localhost')

    # Tests
    with datamodels.Level1bModel(data_file) as model:
        # Save for post-test comparison and update
        model.save(tmp_path / expected_name)

        with datamodels.open(DATA_PATH / expected_name) as expected:
            for meta in METAS_EQUALITY:
                # Name the offending key in the failure message, matching
                # the style of the other add_wcs tests in this suite.
                assert model[meta] == expected[meta], f'{meta} has changed'
            for meta in METAS_ISCLOSE:
                assert np.isclose(model[meta], expected[meta]), f'{meta} has changed'

            assert word_precision_check(model.meta.wcsinfo.s_region,
                                        expected.meta.wcsinfo.s_region)
def test_add_wcs_with_db_fsmcorr_v1(eng_db_ngas, data_file):
    """Test using the database with original FSM correction"""
    stp.add_wcs(data_file, fsmcorr_version='v1', siaf_path=siaf_db, j2fgs_transpose=False)

    with datamodels.Level1bModel(data_file) as model:
        # Alias the meta nodes to keep the assertions compact.
        pointing = model.meta.pointing
        wcsinfo = model.meta.wcsinfo

        assert np.isclose(pointing.ra_v1, 348.9278669)
        assert np.isclose(pointing.dec_v1, -38.749239)
        assert np.isclose(pointing.pa_v3, 50.1767077)

        assert wcsinfo.wcsaxes == 2
        assert wcsinfo.crpix1 == 693.5
        assert wcsinfo.crpix2 == 512.5
        assert np.isclose(wcsinfo.crval1, 348.8776709)
        assert np.isclose(wcsinfo.crval2, -38.854159)
        assert wcsinfo.ctype1 == "RA---TAN"
        assert wcsinfo.ctype2 == "DEC--TAN"
        assert wcsinfo.cunit1 == 'deg'
        assert wcsinfo.cunit2 == 'deg'
        assert np.isclose(wcsinfo.cdelt1, 3.0555555e-5)
        assert np.isclose(wcsinfo.cdelt2, 3.0555555e-5)
        assert np.isclose(wcsinfo.pc1_1, 0.03853303979862607)
        assert np.isclose(wcsinfo.pc1_2, 0.9992573266400789)
        assert np.isclose(wcsinfo.pc2_1, 0.9992573266400789)
        assert np.isclose(wcsinfo.pc2_2, -0.03853303979862607)
        assert wcsinfo.v2_ref == 200.0
        assert wcsinfo.v3_ref == -350.0
        assert wcsinfo.vparity == -1
        assert wcsinfo.v3yangle == 42.0
        assert np.isclose(wcsinfo.ra_ref, 348.8776709)
        assert np.isclose(wcsinfo.dec_ref, -38.854159)
        assert np.isclose(wcsinfo.roll_ref, 50.20832726650)

        assert word_precision_check(
            wcsinfo.s_region,
            ('POLYGON ICRS'
             ' 348.8563379013152 -38.874810886750495'
             ' 348.85810582665334 -38.84318773861823'
             ' 348.8982592685148 -38.84439628911871'
             ' 348.89688051688233 -38.876020020321164')
        )
def data_file():
    """Yield the path to an on-disk Level1bModel carrying ephemeris data
    and an INT_TIMES table.
    """
    model = datamodels.Level1bModel()
    model.meta.target.ra = 0.
    model.meta.target.dec = 0.
    model.meta.ephemeris.time = 55727.0
    model.meta.ephemeris.spatial_x = -34305.4075983316
    model.meta.ephemeris.spatial_y = 1049528.04998405
    model.meta.ephemeris.spatial_z = 679175.58185602
    model.meta.ephemeris.velocity_x = -0.548663244644384
    model.meta.ephemeris.velocity_y = -0.103904924724239
    model.meta.ephemeris.velocity_z = 0.000982870964178323

    # Assign dummy values to the last three columns.
    model.int_times = \
        [(1, 55728.0, 55728.00032119, 55728.00064237, -1., -1., -1.),
         (2, 55728.00064237, 55728.00096356, 55728.00128474, -1., -1., -1.),
         (3, 55728.00128474, 55728.00160593, 55728.00192711, -1., -1., -1.),
         (4, 55728.00192711, 55728.0022483, 55728.00256948, -1., -1., -1.)]

    with TemporaryDirectory() as path:
        file_path = os.path.join(path, 'int_times.fits')
        model.save(file_path)
        # Close the model once written so its resources are released while
        # the test runs, matching the other data-file fixtures.
        model.close()
        yield file_path
def test_add_wcs_with_mast(data_file_fromsim, fgsid, tmp_path):
    """Test using the database"""
    expected_name = f'add_wcs_with_mast_fgs{fgsid}.fits'

    # See if access to MAST is available.
    try:
        engdb_mast.EngdbMast(base_url=engdb_mast.MAST_BASE_URL)
    except RuntimeError as exception:
        pytest.skip(
            f'Live MAST Engineering Service not available: {exception}')

    # Execute the operation.
    try:
        stp.add_wcs(data_file_fromsim, siaf_path=siaf_path,
                    engdb_url=engdb_mast.MAST_BASE_URL, fgsid=fgsid)
    except ValueError as exception:
        pytest.xfail(
            f'No telemetry exists. Update test to use existing telemetry. Exception: {exception}'
        )

    # Compare the result against the stored truth file.
    with datamodels.Level1bModel(data_file_fromsim) as result:
        # Keep a copy in tmp_path so truth files can be refreshed post-test.
        result.save(tmp_path / expected_name)

        with datamodels.open(DATA_PATH / expected_name) as truth:
            for meta in METAS_EQUALITY:
                assert result[meta] == truth[meta]
            for meta in METAS_ISCLOSE:
                assert np.isclose(result[meta], truth[meta])

            assert word_precision_check(result.meta.wcsinfo.s_region,
                                        truth.meta.wcsinfo.s_region)
def file_case(request, tmp_path):
    """Generate files with different model states.

    ``request.param`` is a ``(case, allow)`` pair: ``case`` selects the kind
    of file to create; ``allow`` is passed through unchanged for the test.

    Returns
    -------
    (path, allow)
        Path to the generated file and the pass-through ``allow`` flag.
    """
    case, allow = request.param

    if case == 'good_model':
        # Make a model that will always succeed
        model = datamodels.Level1bModel((10, 10, 10, 10))
        path = tmp_path / 'level1bmodel.fits'
        model.save(path)
    elif case == 'bad_model':
        # Make a model that will fail if not allowed
        model = datamodels.IFUCubeModel((10, 10, 10))
        path = tmp_path / 'image.fits'
        model.save(path)
    elif case == 'fits_nomodel':
        # Create just a plain anything FITS
        hdu = fits.PrimaryHDU()
        hdul = fits.HDUList([hdu])
        path = tmp_path / 'empty.fits'
        hdul.writeto(path)
    else:
        # Raise explicitly rather than `assert False`: asserts are stripped
        # under `python -O`, which would let an unknown case fall through
        # with `path` undefined.
        raise AssertionError(f'Cannot produce a file for {case}')

    return path, allow
dark.close() # Zero-out reference pixel values so that refpix step has no effect data4[0, 0, :4, :] = 0.0 data4[0, 0, -4:, :] = 0.0 data4[0, 0, :, :4] = 0.0 data4[0, 0, :, -4:] = 0.0 # Add superbias into science array bias_file = '/grp/crds/cache/references/jwst/jwst_nirspec_superbias_0030.fits' bias = datamodels.SuperBiasModel(bias_file) data4[0, 0] += bias.data bias.close() # Stuff the data and meta data into an output model output = datamodels.Level1bModel(data=data4) output.update(input) # Set important meta data values output.meta.exposure.ngroups = 1 output.meta.exposure.frame_divisor = 1 output.meta.wcsinfo.cdelt1 = input.meta.wcsinfo.cdelt1 / 3600. output.meta.wcsinfo.cdelt2 = input.meta.wcsinfo.cdelt2 / 3600. output.meta.wcsinfo.pc1_1 = -1.0 output.meta.wcsinfo.v2_ref = 378.770400 output.meta.wcsinfo.v3_ref = -428.155200 output.meta.wcsinfo.v3yangle = 138.492300 output.meta.wcsinfo.vparity = -1 output.meta.wcsinfo.ra_ref = output.meta.wcsinfo.crval1 output.meta.wcsinfo.dec_ref = output.meta.wcsinfo.crval2 output.meta.wcsinfo.roll_ref = 0.0