Example #1
def create_asdf_in_fits(dtype):
    """Test fixture to create AsdfInFits object to use for testing"""
    hdulist = fits.HDUList()
    hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype)))
    hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype)))
    hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype)))

    tree = {
        'model': {
            'sci': {
                'data': hdulist[0].data,
                'wcs': 'WCS info'
            },
            'dq': {
                'data': hdulist[1].data,
                'wcs': 'WCS info'
            },
            'err': {
                'data': hdulist[2].data,
                'wcs': 'WCS info'
            }
        }
    }

    return fits_embed.AsdfInFits(hdulist, tree)
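A note on typical use: the object this fixture returns is meant to be written to a FITS file and read back through the regular asdf API, as the later examples do. A minimal round-trip sketch, assuming an asdf version that still provides fits_embed (the file name here is illustrative):

import asdf
import numpy as np
from astropy.io import fits
from asdf import fits_embed

# Build a small HDUList plus a matching tree and embed the tree in the FITS file.
hdulist = fits.HDUList([fits.PrimaryHDU(),
                        fits.ImageHDU(np.arange(512, dtype=np.float32), name='SCI')])
tree = {'model': {'sci': {'data': hdulist['SCI'].data, 'wcs': 'WCS info'}}}
ff = fits_embed.AsdfInFits(hdulist, tree)
ff.write_to('embedded.fits')

# The embedded tree is readable through the top-level asdf.open API.
with asdf.open('embedded.fits') as af:
    print(af.tree['model']['sci']['data'][:5])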
Example #2
def to_fits(tree, schema):
    hdulist = fits.HDUList()
    hdulist.append(fits.PrimaryHDU())

    _save_from_schema(hdulist, tree, schema)
    _save_extra_fits(hdulist, tree)
    _save_history(hdulist, tree)

    asdf = fits_embed.AsdfInFits(hdulist, tree)
    return asdf
Example #3
def test_embed_asdf_in_fits_file(tmpdir, backwards_compat, dtype):
    fits_testfile = str(tmpdir.join('test.fits'))
    asdf_testfile = str(tmpdir.join('test.asdf'))

    hdulist = fits.HDUList()
    hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype), name='SCI'))
    hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype), name='DQ'))
    # Test a name with underscores to make sure it works
    hdulist.append(
        fits.ImageHDU(np.arange(512, dtype=dtype), name='WITH_UNDERSCORE'))

    tree = {
        'model': {
            'sci': {
                'data': hdulist['SCI'].data,
                'wcs': 'WCS info'
            },
            'dq': {
                'data': hdulist['DQ'].data,
                'wcs': 'WCS info'
            },
            'with_underscore': {
                'data': hdulist['WITH_UNDERSCORE'].data,
                'wcs': 'WCS info'
            }
        }
    }

    ff = fits_embed.AsdfInFits(hdulist, tree)
    ff.write_to(fits_testfile, use_image_hdu=backwards_compat)

    with fits.open(fits_testfile) as hdulist2:
        assert len(hdulist2) == 4
        assert [x.name
                for x in hdulist2] == ['SCI', 'DQ', 'WITH_UNDERSCORE', 'ASDF']
        assert_array_equal(hdulist2[0].data, np.arange(512, dtype=dtype))
        asdf_hdu = hdulist2['ASDF']
        assert asdf_hdu.data.tobytes().startswith(b'#ASDF')
        # When in backwards compatibility mode, the ASDF file will be contained
        # in an ImageHDU
        if backwards_compat:
            assert isinstance(asdf_hdu, fits.ImageHDU)
            assert asdf_hdu.data.tobytes().strip().endswith(b'...')
        else:
            assert isinstance(asdf_hdu, fits.BinTableHDU)

        with fits_embed.AsdfInFits.open(hdulist2) as ff2:
            assert_tree_match(tree, ff2.tree)

            ff = asdf.AsdfFile(copy.deepcopy(ff2.tree))
            ff.write_to(asdf_testfile)

    with asdf.open(asdf_testfile) as ff:
        assert_tree_match(tree, ff.tree)
Example #4
def to_fits(tree, schema):
    hdulist = fits.HDUList()
    hdulist.append(fits.PrimaryHDU())

    _save_from_schema(hdulist, tree, schema)
    _save_extra_fits(hdulist, tree)
    _save_history(hdulist, tree)

    # Store the FITS hash in the tree
    tree[FITS_HASH_KEY] = fits_hash(hdulist)

    asdf = fits_embed.AsdfInFits(hdulist, tree)
    return asdf
Example #5
def test_serialize_table(tmpdir):
    tmpfile = str(tmpdir.join('table.fits'))

    data = np.random.random((10, 10))
    table = Table(data)

    hdu = fits.BinTableHDU(table)
    hdulist = fits.HDUList()
    hdulist.append(hdu)

    tree = {'my_table': hdulist[1].data}
    with fits_embed.AsdfInFits(hdulist, tree) as ff:
        ff.write_to(tmpfile)

    with asdf.open(tmpfile) as ff:
        data = ff.tree['my_table']
        assert data._source.startswith('fits:')
Example #6
def test_create_in_tree_first(tmpdir, dtype):
    tree = {
        'model': {
            'sci': {
                'data': np.arange(512, dtype=dtype),
                'wcs': 'WCS info'
            },
            'dq': {
                'data': np.arange(512, dtype=dtype),
                'wcs': 'WCS info'
            },
            'err': {
                'data': np.arange(512, dtype=dtype),
                'wcs': 'WCS info'
            }
        }
    }

    hdulist = fits.HDUList()
    hdulist.append(fits.ImageHDU(tree['model']['sci']['data']))
    hdulist.append(fits.ImageHDU(tree['model']['dq']['data']))
    hdulist.append(fits.ImageHDU(tree['model']['err']['data']))

    tmpfile = os.path.join(str(tmpdir), 'test.fits')
    with fits_embed.AsdfInFits(hdulist, tree) as ff:
        ff.write_to(tmpfile)

    with asdf.AsdfFile(tree) as ff:
        ff.write_to(os.path.join(str(tmpdir), 'plain.asdf'))

    with asdf.open(os.path.join(str(tmpdir), 'plain.asdf')) as ff:
        assert_array_equal(ff.tree['model']['sci']['data'],
                           np.arange(512, dtype=dtype))

    # This tests the changes that allow FITS files with ASDF extensions to be
    # opened directly by the top-level asdf.open API
    with asdf_open(tmpfile) as ff:
        assert_array_equal(ff.tree['model']['sci']['data'],
                           np.arange(512, dtype=dtype))
Example #7
def cube(tmpdir, tmp_asdf):
    """ Mock a JWST s3d cube """
    hdulist = fits.HDUList()
    hdulist.append(fits.PrimaryHDU())
    hdulist["PRIMARY"].header["TELESCOP"] = ("JWST", "comment")
    hdulist["PRIMARY"].header["FLUXEXT"] = ("ERR", "comment")
    hdulist["PRIMARY"].header["ERREXT"] = ("ERR", "comment")
    hdulist["PRIMARY"].header["MASKEXT"] = ("DQ", "comment")
    # Add ImageHDU for cubes
    shape = [30, 10, 10]
    hdulist.append(create_image_hdu(name='SCI', shape=shape, hdrs=[("BUNIT", 'MJy')]))
    hdulist.append(create_image_hdu(name='ERR', shape=shape,
                                    hdrs=[("BUNIT", 'MJy'), ('ERRTYPE', 'ERR')]))
    hdulist.append(create_image_hdu(name='DQ', shape=shape))

    # Mock the ASDF extension
    hdulist.append(fits.BinTableHDU(name='ASDF'))
    ff = fits_embed.AsdfInFits(hdulist, tmp_asdf)
    tmpfile = str(tmpdir.join('jwst_embedded_asdf.fits'))
    ff.write_to(tmpfile)

    return hdulist
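Reading the embedded tree back out of a file written by this fixture follows the same pattern as Example #3. A minimal sketch, assuming the file name used in the fixture above (the path is illustrative, since the fixture actually writes into a temporary directory):

from astropy.io import fits
from asdf import fits_embed

with fits.open('jwst_embedded_asdf.fits') as hdul:
    with fits_embed.AsdfInFits.open(hdul) as af:
        print(hdul['SCI'].data.shape)   # cube shape set by the fixture above
        print(list(af.tree.keys()))     # keys supplied by the tmp_asdf fixture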
Example #8
def perform_dither_distortion(plot=False):
    """
    Dither the initial direct image and apply detector distortion.

    Args:
        plot (bool): True if plots should be saved.
    """

    # Make four dithered exposures following the standard template pattern
    # (or whatever pattern is specified). This reopens the image just created
    # and uses a reference rate image to build a correct, complete header.

    # Dithers for WFSS (use the sim image just created)
    ref = fits.open('{0}-{1}.fits'.format(OUTROOT, NIS_FILTER.lower()))
    ref_model = grizli.jwst.hdu_to_imagemodel(ref[0])
    blot_parent = gwcs_blot.GWCSBlot(ref_model)
    # Center position on the image.
    r0 = RA_CENTER * u.deg
    d0 = DEC_CENTER * u.deg

    # Dither pattern
    dx = DITHER_X
    dy = DITHER_Y

    if plot:
        plt.figure(figsize=(20, 20))

    # Make the 4 dithers with distortion
    for pos in range(4):
        rate_im, aa = read_rate_im()
        aa['data'] = rate_im['SCI'].data
        aa['dq'] = rate_im['DQ'].data
        wcsinfo = aa['meta']['wcsinfo']
        pointing = aa['meta']['pointing']
        inst = aa['meta']['instrument']
        print('Position {0}, {1}'.format(pos + 1, NIS_FILTER))
        inst['filter'] = rate_im[0].header['FILTER'] = 'CLEAR'
        inst['pupil'] = rate_im[0].header['PUPIL'] = NIS_FILTER.upper()

        cosd = np.cos(d0)  # deg Quantity is converted to radians by astropy (see the sketch after this example)
        pos_ra = (r0 + dx[pos] / cosd * u.arcsec).value
        pos_dec = (d0 + dy[pos] * u.arcsec).value

        # Update header in rate image and WCS info
        rate_im[1].header['CRVAL1'] = rate_im[1].header['RA_REF'] = pos_ra
        rate_im[1].header['CRVAL2'] = rate_im[1].header['DEC_REF'] = pos_dec
        wcsinfo['crval1'] = wcsinfo['ra_ref'] = pointing['ra_v1'] = pos_ra
        wcsinfo['crval2'] = wcsinfo['dec_ref'] = pointing['dec_v1'] = pos_dec

        # Blot reference data to distorted frame
        rate_model = grizli.jwst.img_with_wcs(rate_im)
        blot_child = blot_parent.extract_image(rate_model, interp='poly5')
        rate_im['SCI'].data = blot_child

        # zero-out DQ array
        rate_im['DQ'].data *= 0

        #        rate_im[0].header['TARGNAME'] = aa['meta']['target'] = OUTROOT

        print('{0}-{1}-clear-pos{2}_rate.fits'.format(OUTROOT, NIS_FILTER,
                                                      pos + 1))
        ff = fits_embed.AsdfInFits(rate_im, {'meta': aa['meta']})
        ff.write_to('{0}-{1}-clear-pos{2}_rate.fits'.format(
            OUTROOT, NIS_FILTER, pos + 1),
                    overwrite=True)

        # Make grism files (sci extension is direct image for now)
        for grism in ['gr150r', 'gr150c']:
            inst['filter'] = rate_im[0].header['FILTER'] = grism.upper()
            out = '{0}-{1}-{grism}-pos{2}_rate.fits'.format(OUTROOT,
                                                            NIS_FILTER,
                                                            pos + 1,
                                                            grism=grism)
            print(out)
            ff = fits_embed.AsdfInFits(rate_im, {'meta': aa['meta']})
            ff.write_to(out, overwrite=True)

        # Show dithered images
        if plot:
            plt.subplot(2, 2, pos + 1, projection=pywcs.WCS(rate_im[1].header))
            plt.title("Dither=({}, {})".format(dx[pos], dy[pos]))
            plt.imshow(np.log10(rate_im['SCI'].data))
            plt.grid(color='black', ls='solid')

        # Close the rate image only after the plot has been drawn from it
        rate_im.close()

    figname = "{}_dithers.png".format(OUTROOT)
    plt.savefig(figname)
    print("Wrote {}".format(figname))