Example #1
import numpy as np

def cleanup_mask(mask, n):
    """Eliminate small islands in the mask"""
    # Shrink the mask by a factor n, AND it with its local modal value so
    # that isolated pixels are dropped, then blow it back up to full size
    m = minify(mask, n).astype(np.uint8)
    m = m & modal(m, ELEMENT)
    return oversample(m, n).astype(bool)
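The helpers `minify`, `oversample`, `modal`, and `ELEMENT` are not part of this listing. Below is a minimal sketch of what they might look like, assuming `minify` block-averages the array by a factor `n`, `oversample` undoes that by repeating pixels, and `modal`/`ELEMENT` come from scikit-image's rank modal filter with a structuring element; the details are guesses, not the original implementation.

import numpy as np
from skimage.filters.rank import modal
from skimage.morphology import disk

# Hypothetical structuring element for the modal filter
ELEMENT = disk(3)

def minify(arr, n):
    # Block-average the array by a factor n (assumes shape is a multiple of n)
    ny, nx = arr.shape
    return arr.reshape(ny // n, n, nx // n, n).mean(axis=(1, 3))

def oversample(arr, n):
    # Undo minify() by repeating each pixel n times along both axes
    return np.repeat(np.repeat(arr, n, axis=0), n, axis=1)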
Example #2
import sys

import numpy as np
from astropy.io import fits

# pad_array, oversample and downsample are project helpers defined elsewhere
# (see the sketch after this example)

# Binning factors and the corresponding mingood thresholds for downsample()
nlist = [1, 2, 4, 8, 16, 32, 64]
mingoods = [2, 2, 2, 1, 1, 1, 2]

try:
    infile = sys.argv[1]
except IndexError:
    sys.exit(f"Usage: {sys.argv[0]} FITSFILE")

# Use the primary HDU if it has data, otherwise fall back to the first extension
hdulist = fits.open(infile)
hdu = hdulist[0]
if hdu.data is None:
    hdu = hdulist[1]
hdr = hdu.header

# Maximum binning
nmax = nlist[-1]
# Pad arrays to nearest multiple of nmax
im = pad_array(hdu.data, nmax)

# First version does not use a separate weight array, and uses NaN as mask
w = np.ones_like(im)
# Maybe do a star mask later
m = np.isfinite(im)

for n, mingood in zip(nlist, mingoods):
    im[~m] = 0.0
    outfile = infile.replace(".fits", f"-bin{n:03d}.fits")
    print("Saving", outfile)
    # Save both the scaled image and the weights after scaling back up to full res
    fits.HDUList([
        fits.PrimaryHDU(),
        fits.ImageHDU(data=oversample(im, n), header=hdr, name="scaled"),
        fits.ImageHDU(data=oversample(w, n), header=hdr, name="weight"),
    ]).writeto(outfile, overwrite=True)
    # Now do the rebinning by a factor of two
    [im,], m, w = downsample([im,], m, weights=w, mingood=mingood)
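The `pad_array` and `downsample` helpers are likewise not shown. Here is a rough sketch of the behavior implied by how they are called above: pad with NaNs up to a multiple of `nmax`, and rebin by a factor of two while propagating the weights and requiring at least `mingood` good pixels per 2x2 cell. The details are assumptions, not the original implementation.

import numpy as np

def pad_array(data, n):
    # Pad with NaNs so that both axes become a multiple of n
    ny, nx = data.shape
    return np.pad(data, ((0, (-ny) % n), (0, (-nx) % n)),
                  mode="constant", constant_values=np.nan)

def downsample(images, mask, weights, mingood=1):
    # Rebin by 2: weighted 2x2 block average of each image, summed weights,
    # and a new mask that requires at least `mingood` good pixels per block
    def blocksum(a):
        ny, nx = a.shape
        return a.reshape(ny // 2, 2, nx // 2, 2).sum(axis=(1, 3))

    wsum = blocksum(np.where(mask, weights, 0.0))
    ngood = blocksum(mask.astype(int))
    newmask = ngood >= mingood
    newimages = []
    for im in images:
        num = blocksum(np.where(mask, weights * im, 0.0))
        with np.errstate(invalid="ignore", divide="ignore"):
            newimages.append(np.where(wsum > 0.0, num / wsum, 0.0))
    return newimages, newmask, wsum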
Example #3
    w = pad_array(whdu.data, nmax)
else:
    # Otherwise, just natural weighting
    w = np.ones_like(im)

continuum = fits.open('muse-hr-image-wfc3-f547m.fits')['DATA'].data
starmask = continuum > 150

# If we pad the starmask and combine it with the padded image, then we
# automatically deal with the case where the input files have already
# been padded
m = np.isfinite(im) & (~pad_array(starmask, nmax))

for n, mingood in zip(nlist, mingoods):
    im[~m] = 0.0
    outfile = infile.replace('.fits', '-bin{:03d}.fits'.format(n))
    if n == nlist[0]:
        # Do dependency checking on the first iteration
        if not newer(infile, outfile):
            # Bail out if dependency not newer than target
            sys.exit(outfile + ' is already up to date.')
    print('Saving', outfile)
    # Save both the scaled image and the weights, but at the full resolution
    fits.HDUList([
        fits.PrimaryHDU(),
        fits.ImageHDU(data=oversample(im, n), header=hdr, name='scaled'),
        fits.ImageHDU(data=oversample(w, n), header=hdr, name='weight'),
    ]).writeto(outfile, overwrite=True)
    # Now do the rebinning by a factor of two
    [im,], m, w = downsample([im,], m, weights=w, mingood=mingood)
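The `newer()` dependency check used above is also not defined in the snippet. A minimal sketch, assuming a make-style modification-time comparison:

import os

def newer(source, target):
    # True if `target` does not exist yet, or if `source` was modified more recently
    if not os.path.exists(target):
        return True
    return os.path.getmtime(source) > os.path.getmtime(target)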
Example #4
import sys

from astropy.io import fits

# Binning factors and the corresponding mingood thresholds for downsample()
nlist = [1, 2, 4, 8, 16, 32, 64]
mingoods = [2, 2, 2, 1, 1, 1, 2]

try:
    infile = sys.argv[1]
except IndexError:
    print('Usage:', sys.argv[0], 'FITSFILE')
    sys.exit()

hdulist = fits.open(infile)
hdr = hdulist['scaled'].header
im = hdulist['scaled'].data
w = hdulist['weight'].data
m = w > 0.0

for n, mingood in zip(nlist, mingoods):
    im[~m] = 0.0
    outfile = infile.replace('.fits', '-bin{:03d}.fits'.format(n))
    print('Saving', outfile)
    # Save both the scaled image and the weights, but at the full resolution
    fits.HDUList([
        fits.PrimaryHDU(),
        fits.ImageHDU(data=oversample(im, n), header=hdr, name='scaled'),
        fits.ImageHDU(data=oversample(w, n), header=hdr, name='weight'),
    ]).writeto(outfile, overwrite=True)
    # Now do the rebinning by a factor of two
    [im,], m, w = downsample([im,], m, weights=w, mingood=mingood)
# Program to do multigridding of new spectral maps: multibin-map.py:1 ends here
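Each output file written by these loops should therefore contain an empty primary HDU plus 'scaled' and 'weight' image extensions at the original resolution. A quick way to inspect one of them; the filename here is only an illustration.

from astropy.io import fits

with fits.open("mymap-bin004.fits") as hdulist:
    hdulist.info()                    # PRIMARY, SCALED, WEIGHT
    scaled = hdulist["scaled"].data   # rebinned map, blown back up to full resolution
    weight = hdulist["weight"].data   # corresponding weight map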