Example no. 1
    def setUp(self):
        xidplus.__path__[0]
        # Folder containing maps
        imfolder = xidplus.__path__[0] + '/../test_files/'

        pswfits = imfolder + 'cosmos_itermap_lacey_07012015_simulated_observation_w_noise_PSW_hipe.fits.gz'  # SPIRE 250 map
        pmwfits = imfolder + 'cosmos_itermap_lacey_07012015_simulated_observation_w_noise_PMW_hipe.fits.gz'  # SPIRE 350 map
        plwfits = imfolder + 'cosmos_itermap_lacey_07012015_simulated_observation_w_noise_PLW_hipe.fits.gz'  # SPIRE 500 map

        # Folder containing prior input catalogue
        catfolder = xidplus.__path__[0] + '/../test_files/'
        # prior catalogue
        prior_cat = 'lacey_07012015_MillGas.ALLVOLS_cat_PSW_COSMOS_test.fits'

        # output folder
        output_folder = './'

        # -----250-------------
        hdulist = fits.open(pswfits)
        im250phdu = hdulist[0].header
        im250hdu = hdulist[1].header

        im250 = hdulist[1].data * 1.0E3  # convert to mJy
        nim250 = hdulist[2].data * 1.0E3  # convert to mJy
        w_250 = wcs.WCS(hdulist[1].header)
        pixsize250 = 3600.0 * w_250.wcs.cd[1, 1]  # pixel size (in arcseconds)
        hdulist.close()
        # -----350-------------
        hdulist = fits.open(pmwfits)
        im350phdu = hdulist[0].header
        im350hdu = hdulist[1].header

        im350 = hdulist[1].data * 1.0E3  # convert to mJy
        nim350 = hdulist[2].data * 1.0E3  # convert to mJy
        w_350 = wcs.WCS(hdulist[1].header)
        pixsize350 = 3600.0 * w_350.wcs.cd[1, 1]  # pixel size (in arcseconds)
        hdulist.close()
        # -----500-------------
        hdulist = fits.open(plwfits)
        im500phdu = hdulist[0].header
        im500hdu = hdulist[1].header
        im500 = hdulist[1].data * 1.0E3  # convert to mJy
        nim500 = hdulist[2].data * 1.0E3  # convert to mJy
        w_500 = wcs.WCS(hdulist[1].header)
        pixsize500 = 3600.0 * w_500.wcs.cd[1, 1]  # pixel size (in arcseconds)
        hdulist.close()

        hdulist = fits.open(catfolder + prior_cat)
        fcat = hdulist[1].data
        hdulist.close()
        inra = fcat['RA']
        indec = fcat['DEC']
        # select only sources with 100micron flux greater than 50 microJy
        sgood = fcat['S100'] > 0.050
        inra = inra[sgood]
        indec = indec[sgood]

        from astropy.coordinates import SkyCoord
        from astropy import units as u
        c = SkyCoord(ra=[150.74] * u.degree, dec=[2.03] * u.degree)
        import pymoc
        moc = pymoc.util.catalog.catalog_to_moc(c, 100, 15)

        # ---prior250--------
        prior250 = xidplus.prior(
            im250, nim250, im250phdu, im250hdu, moc=moc
        )  # Initialise with map, uncertainty map, wcs info and primary header
        prior250.prior_cat(inra, indec, prior_cat)  # Set input catalogue
        prior250.prior_bkg(
            -5.0, 5
        )  # Set prior on background (assumes Gaussian pdf with mu and sigma)
        # ---prior350--------
        prior350 = xidplus.prior(im350, nim350, im350phdu, im350hdu, moc=moc)
        prior350.prior_cat(inra, indec, prior_cat)
        prior350.prior_bkg(-5.0, 5)

        # ---prior500--------
        prior500 = xidplus.prior(im500, nim500, im500phdu, im500hdu, moc=moc)
        prior500.prior_cat(inra, indec, prior_cat)
        prior500.prior_bkg(-5.0, 5)

        self.priors = [prior250, prior350, prior500]
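
A note on the pixel-size computation in the setUp above: it reads w_250.wcs.cd, which only exists when the header carries a CD matrix. A minimal, hedged alternative (assuming a map file named 'map.fits' with the image in extension 1; the file name is illustrative only) uses astropy.wcs.utils.proj_plane_pixel_scales, which works for both CD and CDELT conventions:

from astropy.io import fits
from astropy import wcs
from astropy.wcs.utils import proj_plane_pixel_scales

hdulist = fits.open('map.fits')  # hypothetical file, for illustration
w = wcs.WCS(hdulist[1].header)
# pixel scales along each projection-plane axis, returned in degrees
pixsize = 3600.0 * proj_plane_pixel_scales(w)[1]  # arcseconds
hdulist.close()
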
def fourier_combine(
    highresfitsfile,
    lowresfitsfile,
    matching_scale=60 * u.arcsec,
    scale=False,
    return_hdu=False,
):
    """
    Simple reimplementation of 'feather' for 2D images
    """
    raise "Obsolete"
    f1 = fits.open(highresfitsfile)
    w1 = wcs.WCS(f1[0].header)
    f2 = fits.open(lowresfitsfile)
    w2 = wcs.WCS(f2[0].header)

    nax1, nax2 = f1[0].header['NAXIS1'], f1[0].header['NAXIS2']
    # We take care of zooming later...
    #if not(nax1 == f2[0].header['NAXIS1'] and nax2 == f2[0].header['NAXIS2']):
    #    raise ValueError("Images are not in the same pixel space; reproject "
    #                     "them to common pixel space first.")

    pixscale1 = w1.wcs.get_cdelt()[1]
    pixscale2 = w2.wcs.get_cdelt()[1]

    center = w1.sub([wcs.WCSSUB_CELESTIAL]).wcs_pix2world([nax1 / 2.],
                                                          [nax2 / 2.], 1)
    frame = 'icrs' if w1.celestial.wcs.ctype[0][:2] == 'RA' else 'galactic'
    if w2.celestial.wcs.ctype[0][:2] == 'RA':
        center = coordinates.SkyCoord(*(center * u.deg), frame=frame).fk5
        cxy = center.ra.deg, center.dec.deg
    elif w2.celestial.wcs.ctype[0][:4] == 'GLON':
        center = coordinates.SkyCoord(*(center * u.deg), frame=frame).galactic
        cxy = center.l.deg, center.b.deg

    im1 = f1[0].data.squeeze()
    im1[np.isnan(im1)] = 0
    shape = im1.shape
    im2raw = f2[0].data.squeeze()
    im2raw[np.isnan(im2raw)] = 0
    if len(shape) != im2raw.ndim:
        raise ValueError("Different # of dimensions in the interferometer and "
                         "single-dish images")
    if len(shape) == 3:
        if shape[0] != im2raw.shape[0]:
            raise ValueError("Spectral dimensions of cubes do not match.")

    center_pixel = w2.sub([wcs.WCSSUB_CELESTIAL
                           ]).wcs_world2pix(cxy[0], cxy[1], 0)[::-1]

    zoomed = zoom_on_pixel(np.nan_to_num(im2raw),
                           center_pixel,
                           usfac=np.abs(pixscale2 / pixscale1),
                           outshape=shape)

    im2 = zoomed

    xax, psd1 = fft_psd_tools.PSD2(im1, oned=True)
    xax, psd2 = fft_psd_tools.PSD2(im2, oned=True)

    xax_as = (pixscale1 / xax * u.deg).to(u.arcsec)

    if scale:
        closest_point = np.argmin(np.abs(xax_as - matching_scale))

        scale_2to1 = (psd1[closest_point] / psd2[closest_point])**0.5
    else:
        scale_2to1 = 1

    fft1 = np.fft.fft2(im1)
    fft2 = np.fft.fft2(im2) * scale_2to1

    xgrid, ygrid = (np.indices(shape) -
                    np.array([(shape[0] - 1.) / 2,
                              (shape[1] - 1.) / 2.])[:, None, None])

    sigma = np.abs(shape[0] / (
        (matching_scale /
         (pixscale1 * u.deg)).decompose().value)) / np.sqrt(8 * np.log(2))
    kernel = np.fft.fftshift(np.exp(-(xgrid**2 + ygrid**2) / (2 * sigma**2)))
    kernel /= kernel.max()

    fftsum = kernel * fft2 + (1 - kernel) * fft1

    combo = np.fft.ifft2(fftsum)

    if not return_hdu:
        return combo
    elif return_hdu:
        combo_hdu = fits.PrimaryHDU(data=np.abs(combo), header=w1.to_header())
        return combo_hdu
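
The weighting at the heart of fourier_combine is a Gaussian kernel in Fourier space: the low-resolution image dominates at large angular scales (where the kernel is close to 1) and the high-resolution image elsewhere. A self-contained numpy sketch of just that step, on synthetic arrays with an arbitrary matching scale:

import numpy as np

shape = (256, 256)
im_hi = np.random.randn(*shape)  # stand-in for the high-resolution image
im_lo = np.random.randn(*shape)  # stand-in for the zoomed low-resolution image

fft_hi = np.fft.fft2(im_hi)
fft_lo = np.fft.fft2(im_lo)

# Gaussian taper, shifted so it peaks at the zero-frequency pixel,
# playing the role of `kernel` in fourier_combine
yy, xx = np.indices(shape) - np.array([(shape[0] - 1.) / 2,
                                       (shape[1] - 1.) / 2])[:, None, None]
sigma = 20.0  # arbitrary width in grid pixels
kernel = np.fft.fftshift(np.exp(-(xx**2 + yy**2) / (2 * sigma**2)))
kernel /= kernel.max()

# low-res weighted on large scales, high-res on small scales
combo = np.fft.ifft2(kernel * fft_lo + (1 - kernel) * fft_hi).real
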
Example no. 3
def test_fits_transform():
    hdr = fits.Header.fromfile(get_pkg_data_filename('data/simple_wcs2.hdr'))
    gw1 = gwutils.make_fitswcs_transform(hdr)
    w1 = fitswcs.WCS(hdr)
    assert_allclose(gw1(1, 2), w1.wcs_pix2world(1, 2, 1), atol=10**-8)
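
For context, the test above checks that a gwcs-built transform matches astropy's own pixel-to-world conversion. A hedged astropy-only sketch of the same kind of consistency check, building a simple TAN header in memory instead of reading simple_wcs2.hdr:

import numpy as np
from astropy.io import fits
from astropy import wcs

hdr = fits.Header()
hdr['CTYPE1'], hdr['CTYPE2'] = 'RA---TAN', 'DEC--TAN'
hdr['CRVAL1'], hdr['CRVAL2'] = 150.0, 2.0
hdr['CRPIX1'], hdr['CRPIX2'] = 50.0, 50.0
hdr['CDELT1'], hdr['CDELT2'] = -0.0003, 0.0003

w = wcs.WCS(hdr)
# pixel -> world -> pixel should round-trip to numerical precision
world = w.wcs_pix2world([[1.0, 2.0]], 1)
pix = w.wcs_world2pix(world, 1)
np.testing.assert_allclose(pix, [[1.0, 2.0]], atol=1e-8)
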
Example no. 4
def add_results(data, imagestretch='linear'):
    """
    add results to website
    """

    ### create lightcurve plots for each target

    data['lightcurveplots'] = {}
    for target in data['targetnames']:

        if sys.version_info < (3, 0):
            target = str(target)
            
        logging.info('create lightcurve plot for %s' % target)
        plt.plot()
        plt.title(target)
        plt.xlabel('Observation Midtime (JD)')
        plt.ylabel('Magnitude')
        plt.errorbar([dat[9][0] for dat in data[target]], 
                     [dat[7] for dat in data[target]], 
                     yerr=[dat[8] for dat in data[target]],
                     linestyle='', color='black')
        plt.ylim([plt.ylim()[1], plt.ylim()[0]])
        plt.grid()
        plt.savefig('.diagnostics/'
                    + ('%s.png' % target.translate(_pp_conf.target2filename)),
                    format='png')
        plt.close()
        data['lightcurveplots'][target] = ('.diagnostics/' + '%s.png' % 
                                           target.translate(_pp_conf.target2filename))

    ##### create thumbnail images
    
    data['thumbnailplots'] = {}
    data['gifs'] = {}
    boxsize = 300 # thumbnail boxsize
    for target in data['targetnames']:

        if sys.version_info < (3, 0):
            target = str(target)
            
        data['thumbnailplots'][target] = []
        for dat in data[target]:
            for fitsfilename in ['.fits', '.fit']:
                fitsfilename = dat[10][:dat[10].find('.ldac')]+fitsfilename
                if os.path.isfile(fitsfilename):
                    break
                #= dat[10][:dat[10].find('.ldac')]+'.fits'
            hdulist = fits.open(fitsfilename, ignore_missing_end=True)

            logging.info('create thumbnail image for %s/%s' % (target, 
                                                            fitsfilename))

            # turn relevant header keywords into floats
            # should be fixed in astropy.wcs
            for key, val in list(hdulist[0].header.items()):
                if 'CD1' in key or 'CD2' in key or \
                   'CRVAL' in key or 'CRPIX' in key or \
                   'EQUINOX' in key:
                    hdulist[0].header[key] = float(val)
                # if 'PV1' in key or 'PV2' in key:            
                #     del hdulist[0].header[key]
            
            w = wcs.WCS(hdulist[0].header)
            obj_x, obj_y = dat[11], dat[12]
            image_coords = w.wcs_world2pix(numpy.array([[dat[1], dat[2]]]), 
                                           True)
            exp_x, exp_y = image_coords[0][0], image_coords[0][1]

            # create margin around image allowing for any cropping 
            composite = numpy.zeros((hdulist[0].data.shape[0]+2*boxsize, 
                                     hdulist[0].data.shape[1]+2*boxsize))

            composite[boxsize:boxsize+hdulist[0].data.shape[0], 
                      boxsize:boxsize+hdulist[0].data.shape[1]] = \
                                                            hdulist[0].data

            # extract thumbnail data accordingly
            thumbdata = composite[int(boxsize+obj_y-old_div(boxsize,2)):
                                  int(boxsize+obj_y+old_div(boxsize,2)), 
                                  int(boxsize+obj_x-old_div(boxsize,2)):
                                  int(boxsize+obj_x+old_div(boxsize,2))]

            ## run statistics over center of the frame around the target
            if thumbdata.shape[0] > 0 and thumbdata.shape[1] > 0:
                norm = ImageNormalize(thumbdata, interval=ZScaleInterval(),
                                stretch={'linear': LinearStretch(),
                                         'log': LogStretch()}[imagestretch])
                # extract aperture radius
                if _pp_conf.photmode == 'APER':
                    aprad = float(hdulist[0].header['APRAD'])

                # create plot
                #plotsize = 7. # inches
                fig = plt.figure()
                img = plt.imshow(thumbdata, cmap='gray',
                                 origin='lower', norm=norm)
                # remove axes
                plt.axis('off')
                img.axes.get_xaxis().set_visible(False)
                img.axes.get_yaxis().set_visible(False)

                plt.annotate('%s\n%5.3f+-%5.3f mag' % (fitsfilename,
                                                       dat[7], dat[8]), (3,10), 
                             color='white')

                # place aperture
                if _pp_conf.photmode == 'APER':
                    targetpos = plt.Circle((boxsize/2, boxsize/2), 
                                           aprad, ec='red', fc='none',
                                           linewidth=1)
                else:
                    targetpos = plt.Rectangle((boxsize/2-7, boxsize/2-7),
                                              15, 15, ec='red', fc='none',
                                              linewidth=1)
                plt.gca().add_patch(targetpos)
                    
                # place expected position (if within thumbnail)
                if (abs(exp_x-obj_x) <= old_div(boxsize,2.) and 
                    abs(exp_y-obj_y) <= old_div(boxsize,2.)): 
                    plt.scatter(exp_x-obj_x+old_div(boxsize,2.), 
                                exp_y-obj_y+old_div(boxsize,2.), 
                                marker='+', s=100, color='green')

                thumbfilename = '.diagnostics/' + \
                            target.translate(_pp_conf.target2filename) + '_' + \
                            fitsfilename[:fitsfilename.find('.fit')] + \
                            '_thumb.png'
                plt.savefig(thumbfilename, format='png',
                            bbox_inches='tight', 
                            pad_inches=0)
                plt.close()
                hdulist.close()
                data['thumbnailplots'][target].append((fitsfilename, 
                                                       thumbfilename))
            else:
                logging.warning('cannot produce thumbnail image ' + \
                                'for %s in frame %s' % (target, dat[10]))
                continue 


        ## create gif animation
        gif_filename = ('%s.gif' % target.translate(_pp_conf.target2filename))
        logging.info('converting images to gif: %s' % gif_filename)
        root = os.getcwd()
        os.chdir(_pp_conf.diagroot)
        try:
            convert = subprocess.Popen(['convert', '-delay', '50', 
                                        ('%s*thumb.png' % 
                                (target.translate(_pp_conf.target2filename))), 
                                        '-loop', '0', 
                                        ('%s' % gif_filename)])

            convert.wait()
        except:
            logging.warning('could not produce gif animation for ' \
                            + 'target %s' % target)
        data['gifs'][target] = '.diagnostics/' + gif_filename
        os.chdir(root)


    ### create results website for each target
    data['resultswebsites'] = {}
    for target in data['targetnames']:

        if sys.version_info < (3, 0):
            target = str(target)

        html  = "<H2>%s - Photometric Results</H2>\n" % target
        html += "<P><IMG SRC=\"%s\">\n" % \
                data['lightcurveplots'][target].split('.diagnostics/')[1]
        html += "<IMG SRC=\"%s\">\n" % \
                data['gifs'][target].split('.diagnostics/')[1]

        # create summary table
        html += "<TABLE BORDER=\"1\">\n<TR>\n"
        html += "<TH>Filename</TH><TH>Julian Date</TH><TH>Target (mag)</TH>" \
            + "<TH>sigma (mag)</TH><TH>Target RA (deg)</TH>" \
            + "<TH>Target Dec (deg)</TH><TH>RA Offset (\")</TH>" \
            + "<TH>Dec Offset (\")</TH>\n</TR>\n"
        for dat in data[target]:
            html += ("<TR><TD><A HREF=\"#%s\">%s</A></TD>" \
                     + "<TD>%15.7f</TD><TD>%7.4f</TD>" \
                     + "<TD>%6.4f</TD><TD>%13.8f</TD>" \
                     + "<TD>%+13.8f</TD><TD>%5.2f</TD><TD>%5.2f</TD>\n" \
                     + "</TR>\n" )% \
                (dat[10], dat[10], dat[9][0], dat[7], dat[8], dat[3], dat[4], 
                 ((dat[1]-dat[3])*3600.), ((dat[2]-dat[4])*3600.))
        html += "</TABLE>\n"

        # plot individual thumbnails
        html += "<H3>Thumbnails</H3>\n"
        for idx, plts in enumerate(data['thumbnailplots'][target]):
            html += "<P>%s<IMG ID=\"%s\" SRC=\"%s\">\n" % (plts[0],
                                    data[target][idx][10],
                                    plts[1].split('.diagnostics/')[1])
        filename = '.diagnostics/' + \
                   target.translate(_pp_conf.target2filename) + \
                   '_' + 'results.html'
        create_website(filename, html)
        data['resultswebsites'][target] = filename 


    ### update index.html
    html  = "<H2>Photometry Results</H2>\n"
    html += "<P>photometric data obtained for %d object(s): \n" % \
            len(data['targetnames'])
    for target in data['targetnames']:
        html += "<BR><A HREF=\"%s\">%s</A>\n" % \
                (data['resultswebsites'][target], target)
    for target in data['targetnames']:
        html += "<P><IMG SRC=\"%s\">\n" % data['lightcurveplots'][target]
        html += "<IMG SRC=\"%s\">\n" % data['gifs'][target]
    append_website(_pp_conf.index_filename, html, 
                   replace_below="<H2>Photometry Results</H2>\n")    

    return None
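
The thumbnail scaling in add_results relies on astropy.visualization's ZScale interval plus a selectable stretch. A minimal standalone sketch of that normalization pattern, with random data standing in for the FITS cutout:

import numpy as np
import matplotlib.pyplot as plt
from astropy.visualization import (ImageNormalize, ZScaleInterval,
                                   LinearStretch, LogStretch)

thumbdata = np.random.rand(100, 100)  # stand-in for the image cutout
imagestretch = 'linear'               # or 'log', as in add_results
norm = ImageNormalize(thumbdata, interval=ZScaleInterval(),
                      stretch={'linear': LinearStretch(),
                               'log': LogStretch()}[imagestretch])
plt.imshow(thumbdata, cmap='gray', origin='lower', norm=norm)
plt.axis('off')
plt.savefig('thumb_example.png', bbox_inches='tight', pad_inches=0)
plt.close()
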
Example no. 5
def hcongrid(image, header1, header2, **kwargs):
    """
    Interpolate an image from one FITS header onto another

    kwargs will be passed to `scipy.ndimage.map_coordinates`

    Parameters
    ----------
    image : ndarray
        A two-dimensional image 
    header1 : `pyfits.Header` or `pywcs.WCS`
        The header or WCS corresponding to the image
    header2 : `pyfits.Header` or `pywcs.WCS`
        The header or WCS to interpolate onto

    Returns
    -------
    ndarray with shape defined by header2's naxis1/naxis2

    Raises
    ------
    TypeError if either is not a Header or WCS instance
    Exception if image1's shape doesn't match header1's naxis1/naxis2

    Examples
    --------
    (not written with >>> because test.fits/test2.fits do not exist)
    fits1 = pyfits.open('test.fits')
    target_header = pyfits.getheader('test2.fits')
    new_image = hcongrid(fits1[0].data, fits1[0].header, target_header)

    """

    if issubclass(pywcs.WCS, header1.__class__):
        wcs1 = header1
    else:
        try:
            wcs1 = pywcs.WCS(header1)
        except:
            raise TypeError(
                "Header1 must either be a pyfits.Header or pywcs.WCS instance")

    if not (wcs1.naxis1 == image.shape[1] and wcs1.naxis2 == image.shape[0]):
        raise Exception("Image shape must match header shape.")

    if issubclass(pywcs.WCS, header2.__class__):
        wcs2 = header2
    else:
        try:
            wcs2 = pywcs.WCS(header2)
        except:
            raise TypeError(
                "Header2 must either be a pyfits.Header or pywcs.WCS instance")

    if not all([w1 == w2 for w1, w2 in zip(wcs1.wcs.ctype, wcs2.wcs.ctype)]):
        # do unit conversions
        raise NotImplementedError(
            "Unit conversions have not yet been implemented.")

    # sigh... why does numpy use matrix convention?  Makes everything so much harder...
    outshape = [wcs2.naxis2, wcs2.naxis1]
    yy2, xx2 = np.indices(outshape)
    lon2, lat2 = wcs2.wcs_pix2sky(xx2, yy2, 0)
    xx1, yy1 = wcs1.wcs_sky2pix(lon2, lat2, 0)
    grid1 = np.array([yy1.reshape(outshape), xx1.reshape(outshape)])

    newimage = scipy.ndimage.map_coordinates(np.nan_to_num(image), grid1,
                                             **kwargs)

    return newimage
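
hcongrid hand-rolls the regridding with scipy.ndimage.map_coordinates; in newer code the same result can usually be obtained with the reproject package. A hedged sketch, reusing the hypothetical test.fits/test2.fits from the docstring and assuming reproject is installed:

from astropy.io import fits
from reproject import reproject_interp

fits1 = fits.open('test.fits')               # hypothetical input, as in the docstring
target_header = fits.getheader('test2.fits')

# returns the interpolated image plus a footprint of valid output pixels
new_image, footprint = reproject_interp(fits1[0], target_header)
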
Example no. 6
def make_narrowband_image(
    detectid=None,
    coords=None,
    shotid=None,
    pixscale=0.25 * u.arcsec,
    imsize=30.0 * u.arcsec,
    wave_range=None,
    convolve_image=True,
    ffsky=True,
    subcont=False,
    dcont=50.,
):

    """
    Function to make narrowband image from either a detectid or from a
    coordinate/shotid combination.
    
    Parameters
    ----------
    detectid: int
        detectid from the continuum or lines catalog. Default is
        None. Provide a coords/shotid combo if this isn't given
    coords: SkyCoords object
        coordinates to define the centre of the data cube
    pixscale: astropy angle quantity
         plate scale
    imsize: astropy angle quantity
        image size
    wave_range: list or None
        start and stop value for the wavelength range in Angstrom.
        If not given, the detectid linewidth is used
    convolve_image: bool
        option to convolve image with shotid seeing
    ffsky: bool
        option to use full frame calibrated fibers. Default is
        True.
    subcont: bool
        option to subtract continuum. Default is False. This
        will measure the continuum 50AA below and above the
        input wave_range
    dcont: float
        width in Angstrom of the regions used to measure the continuum.
        Default is to measure 50 AA wide regions on either side of the line
         
    
    Returns
    -------
    hdu: PrimaryHDU object
        the 2D summed data array and associated 2d header
        Units are '10^-17 erg cm-2 s-1'
    
    Examples
    --------

    For a specific detectid:
    >>> hdu = make_narrowband_image(detectid=2101046271)
    
    For a SkyCoords object. You must provide shotid and
    wavelength range

    >>> coords = SkyCoord(188.79312, 50.855747, unit='deg')
    >>> wave_obj = 4235.84 #in Angstrom
    >>> hdu = make_narrowband_image(coords=coords,
                                    shotid=20190524021,
                                    wave_range=[wave_obj-10, wave_obj+10])
    """
    global config, detecth5, surveyh5

    if detectid is not None:
        
        detectid_obj = detectid
        det_info = detecth5.root.Detections.read_where("detectid == detectid_obj")[0]
        shotid_obj = det_info["shotid"]
        wave_obj = det_info["wave"]
        linewidth = det_info["linewidth"]
        wave_range = [wave_obj - 2.0 * linewidth,
                      wave_obj + 2.0 * linewidth]
        coords = SkyCoord(det_info["ra"], det_info["dec"], unit="deg")
    elif coords is not None:
        if shotid is not None:
            shotid_obj = shotid
        else:
            print("Provide a shotid")
        if wave_range is None:
            print(
                "Provide a wavelength range to collapse. \
            Example wave_range=[4500,4540]"
            )
    else:
        print("Provide a detectid or both a coords and shotid")

    fwhm = surveyh5.root.Survey.read_where("shotid == shotid_obj")["fwhm_virus"][0]

    E = Extract()
    E.load_shot(shotid_obj)

    # get spatial dims:
    ndim = int(imsize / pixscale)
    center = int(ndim / 2)

    rad = imsize
    info_result = E.get_fiberinfo_for_coord(coords, radius=rad, ffsky=ffsky)
    ifux, ifuy, xc, yc, ra, dec, data, error, mask = info_result

    # get ifu center:
    ifux_cen, ifuy_cen = E.convert_radec_to_ifux_ifuy(
        ifux, ifuy, ra, dec, coords.ra.deg, coords.dec.deg
    )

    zarray = E.make_narrowband_image(
        ifux_cen,
        ifuy_cen,
        ifux,
        ifuy,
        data,
        mask,
        seeing_fac=fwhm,
        scale=pixscale.to(u.arcsec).value,
        boxsize=imsize.to(u.arcsec).value,
        wrange=wave_range,
        convolve_image=convolve_image,
    )

    imslice = zarray[0]

    if subcont:
        zarray_blue = E.make_narrowband_image(
            ifux_cen,
            ifuy_cen,
            ifux,
            ifuy,
            data,
            mask,
            seeing_fac=fwhm,
            scale=pixscale.to(u.arcsec).value,
            boxsize=imsize.to(u.arcsec).value,
            wrange=[wave_range[0]-dcont, wave_range[0]],
            convolve_image=convolve_image,
        )

        zarray_red = E.make_narrowband_image(
            ifux_cen,
            ifuy_cen,
            ifux,
            ifuy,
            data,
            mask,
            seeing_fac=fwhm,
            scale=pixscale.to(u.arcsec).value,
            boxsize=imsize.to(u.arcsec).value,
            wrange=[wave_range[1], wave_range[1]+dcont],
            convolve_image=convolve_image,
        )
        
        dwave = wave_range[1]-wave_range[0]
        im_cont = (zarray_blue[0] + zarray_red[0])/(2*dcont)

        imslice = zarray[0] - dwave*im_cont


    w = wcs.WCS(naxis=2)
    imsize = imsize.to(u.arcsec).value
    w.wcs.crval = [coords.ra.deg, coords.dec.deg]
    w.wcs.crpix = [center, center]
    w.wcs.ctype = ["RA---TAN", "DEC--TAN"]
    w.wcs.cdelt = [-pixscale.to(u.deg).value, pixscale.to(u.deg).value]

    hdu = fits.PrimaryHDU(imslice, header=w.to_header())

    return hdu
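
The returned PrimaryHDU carries a 2D celestial WCS, so it can be written to disk or shown on WCS-aware axes directly. A short usage sketch, assuming the HETDEX environment required by make_narrowband_image is available:

import matplotlib.pyplot as plt
from astropy.wcs import WCS

hdu = make_narrowband_image(detectid=2101046271)  # example detectid from the docstring
hdu.writeto('narrowband.fits', overwrite=True)

ax = plt.subplot(projection=WCS(hdu.header))
ax.imshow(hdu.data, origin='lower', cmap='gray')
ax.set_xlabel('RA')
ax.set_ylabel('Dec')
plt.savefig('narrowband.png')
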
Example no. 7
def add_registration(data, extraction_data, imagestretch='linear'):
    """
    add registration results to website
    """
    obsparam = extraction_data[0]['parameters']['obsparam']


    # create registration website
    html  = "<H2>Registration Results</H2>\n"
    html += "<TABLE BORDER=\"1\">\n<TR>\n"
    html += "<TH>Filename</TH><TH>AS_CONTRAST</TH><TH>XY_CONTRAST</TH>" \
            + "<TH>RA_sig (arcsec)</TH><TH>DEC_sig (arcsec)</TH>" \
            + "<TH>Chi2_Reference</TH><TH>Chi2_Internal</TH>\n</TR>\n"
    for dat in data['fitresults']:
        html += ("<TR><TD><A HREF=\"%s\">%s</A></TD>" \
                 + "<TD>%4.1f</TD><TD>%4.1f</TD>" \
                 + "<TD>%5.3f</TD><TD>%5.3f</TD>" \
                 + "<TD>%e</TD><TD>%e</TD>\n</TR>\n" )% \
                (dat[0] + '_astrometry.png',
                 dat[0], dat[1], dat[2], dat[3], dat[4], dat[5], dat[6])
    html += "</TABLE>\n"
    html += "<P>AS_CONTRAST: position angle/scale contrast " + \
            "(>%.1f usually ok)\n" % _pp_conf.scamp_as_contrast_limit
    html += "<BR>XY_CONTRAST: xy-shift contrast (>%.1f usually ok)\n" % \
            _pp_conf.scamp_xy_contrast_limit
    create_website(_pp_conf.reg_filename, content=html)


    # load reference catalog
    refcat = catalog(data['catalog'])
    for filename in os.listdir('.'):
        if data['catalog'] in filename and '.cat' in filename:
            refcat.read_ldac(filename)
            break


    ### create frame images
    for dat in extraction_data:
        framefilename = '.diagnostics/' + dat['fits_filename'] + \
                        '_astrometry.png'        
        imgdat = fits.open(dat['fits_filename'], 
                           ignore_missing_end=True)[0].data
        resize_factor = min(1., 1000./numpy.max(imgdat.shape))
        # clip extreme values to prevent crash of imresize
        imgdat = numpy.clip(imgdat, numpy.percentile(imgdat, 1),
                            numpy.percentile(imgdat, 99))
        imgdat = imresize(imgdat, resize_factor, interp='nearest')
        header = fits.open(dat['fits_filename'], 
                           ignore_missing_end=True)[0].header

        norm = ImageNormalize(imgdat, interval=ZScaleInterval(),
                      stretch={'linear': LinearStretch(),
                               'log': LogStretch()}[imagestretch])
        
        # turn relevant header keys into floats
        # astropy.io.fits bug
        for key, val in list(header.items()):
            if 'CD1_' in key or 'CD2_' in key or \
               'CRVAL' in key or 'CRPIX' in key or \
               'EQUINOX' in key:
                header[key] = float(val)
                
        plt.figure(figsize=(5, 5))
        img = plt.imshow(imgdat, cmap='gray', norm=norm,
                         origin='lower')

        # remove axes
        plt.axis('off')
        img.axes.get_xaxis().set_visible(False)
        img.axes.get_yaxis().set_visible(False)

        # plot reference sources
        if refcat.shape[0] > 0:
            try:
                w = wcs.WCS(header)
                world_coo = numpy.array(list(zip(refcat['ra.deg'], 
                                                 refcat['dec.deg'])))
                img_coo = w.wcs_world2pix(world_coo, True )
                img_coo = [c for c
                           in img_coo if (c[0] > 0 and c[1] > 0 and 
                                          c[0] < header[obsparam['extent'][0]] 
                                          and 
                                          c[1] < header[obsparam['extent'][1]])]
                plt.scatter([c[0]*resize_factor for c in img_coo],
                            [c[1]*resize_factor for c in img_coo], 
                            s=5, marker='o', edgecolors='red', linewidth=0.1,
                            facecolor='none')
            except astropy.wcs._wcs.InvalidTransformError:
                logging.error('could not plot reference sources due to '
                              'astropy.wcs._wcs.InvalidTransformError; '
                              'most likely unknown distortion parameters.')

                
        plt.savefig(framefilename, format='png', bbox_inches='tight', 
                    pad_inches=0, dpi=200)
        plt.close()



    # update index.html
    html  = '<H2>Registration</H2>\n'
    html += '%d/%d files have been registered successfully based on %s; ' % \
            (len(data['goodfits']), len(data['goodfits']+data['badfits']),
             data['catalog'])
    if len(data['badfits']) > 0:
        html += '<B>%d files could not be registered</B>;' % \
                len(data['badfits'])  
    html += 'see <A HREF=\"%s\">registration website</A> for details\n' % \
            _pp_conf.reg_filename

    append_website(_pp_conf.index_filename, html, 
                   replace_below="<H2>Registration Results</H2>\n")

    return None
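
Note that scipy.misc.imresize, used above to downsize the frame, was removed in SciPy 1.3. A hedged sketch of an equivalent rescaling with scipy.ndimage.zoom (random data standing in for the frame):

import numpy as np
from scipy import ndimage

imgdat = np.random.rand(4000, 2000)                  # stand-in for the frame data
resize_factor = min(1.0, 1000.0 / np.max(imgdat.shape))
# order=0 mimics the 'nearest' interpolation used with imresize
imgdat_small = ndimage.zoom(imgdat, resize_factor, order=0)
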
Example no. 8
def hdr_cood(filename):
    hdulist = pf.open(filename)
    w = wcs.WCS(hdulist[0].header)
    hdulist.close()
    return w
bad_ra, bad_dec, bad_z, bad_bcgx, bad_bcgy = [], [], [], [], []
norm_ra, norm_dec, norm_z, norm_bcgx, norm_bcgy = [], [], [], [], []

for kk in range(len(set_z)):

    ra_g, dec_g, z_g = set_ra[kk], set_dec[kk], set_z[kk]

    #file = home + 'wget_data/frame-%s-ra%.3f-dec%.3f-redshift%.3f.fits.bz2' % ('r', ra_g, dec_g, z_g)
    file = home + 'redMap_random/rand_img-%s-ra%.3f-dec%.3f-redshift%.3f.fits.bz2' % (
        'r', ra_g, dec_g, z_g)

    data = fits.open(file)
    img = data[0].data
    head = data[0].header
    wcs_lis = awc.WCS(head)
    xn, yn = wcs_lis.all_world2pix(ra_g * U.deg, dec_g * U.deg, 1)

    Da_g = Test_model.angular_diameter_distance(z_g).value

    hdu = fits.PrimaryHDU()
    hdu.data = img
    hdu.header = head
    hdu.writeto('test.fits', overwrite=True)

    param_A = 'default_mask_A.sex'
    out_cat = 'default_mask_A.param'

    #out_load_A = load + 'source_find/mask_ra%.3f_dec%.3f_z%.3f_band-%s.cat' % (ra_g, dec_g, z_g, 'r')
    #out_load_A = load + 'source_find/rand_mask_ra%.3f_dec%.3f_z%.3f_band-%s.cat' % (ra_g, dec_g, z_g, 'r')
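
The angular-diameter distance Da_g computed in the loop is typically used to convert a physical aperture into pixels. A hedged sketch of that conversion with astropy.cosmology (Planck15 stands in for the Test_model cosmology defined elsewhere in the script; the pixel scale is an assumed SDSS-like value):

import astropy.units as U
from astropy.cosmology import Planck15 as Test_model  # stand-in cosmology

z_g = 0.25            # example redshift
pixel_scale = 0.396   # arcsec per pixel, an assumption
R_phys = 1.0 * U.Mpc  # physical radius to convert

Da_g = Test_model.angular_diameter_distance(z_g)  # Mpc
theta = (R_phys / Da_g) * U.rad                   # small-angle approximation
R_pix = theta.to(U.arcsec).value / pixel_scale    # radius in pixels
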
Example no. 10
# Set the WCS information manually by setting properties of the WCS
# object.

from __future__ import division, print_function

import numpy
from astropy import wcs
from astropy.io import fits

# Create a new WCS object.  The number of axes must be set
# from the start
w = wcs.WCS(naxis=2)

# Set up an "Airy's zenithal" projection
# Vector properties may be set with Python lists, or Numpy arrays
w.wcs.crpix = [-234.75, 8.3393]
w.wcs.cdelt = numpy.array([-0.066667, 0.066667])
w.wcs.crval = [0, -90]
w.wcs.ctype = ["RA---AIR", "DEC--AIR"]
w.wcs.set_pv([(2, 1, 45.0)])

# Some pixel coordinates of interest.
pixcrd = numpy.array([[0, 0], [24, 38], [45, 98]], numpy.float64)

# Convert pixel coordinates to world coordinates
world = w.wcs_pix2world(pixcrd, 1)
print(world)

# Convert the same coordinates back to pixel coordinates.
pixcrd2 = w.wcs_world2pix(world, 1)
print(pixcrd2)
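
Continuing the same astropy documentation example, the WCS object can be serialized back into a FITS header and attached to a new HDU (this reuses the w and fits imported above):

# Convert the WCS object back into a FITS header and attach it to a new HDU
header = w.to_header()
hdu = fits.PrimaryHDU(header=header)
# hdu.writeto('new_wcs.fits')  # uncomment to write the file to disk
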
Example no. 11
from astropy import wcs
from astropy.io import fits
from astropy import units as u
from astropy import constants as const
from spectral_cube import SpectralCube
import spectral_cube
import astrodendro
# _____________________________________
file = "./../Data/higal_data/column_properunits_conv36_source_only.fits"
hdu = fits.open(file)[0]

dend = astrodendro.Dendrogram.load_from("./../Dendrogram_files/clouds_only_dendrogram.fits")
leaves = dend.leaves[9:(len(dend.leaves)-3)]

colfile = file
header = fits.getheader(colfile)
mywcs = wcs.WCS(header)

molecules = ["C18O", "13CO", "H2CO_303_202"]

for mol in molecules:
    if mol == "C18O":
        cube_file = './APEX_data/APEX_C18O_2014_merge.fits'
    if mol == "13CO":
        cube_file = './APEX_data/APEX_13CO_2014_merge.fits'
    if mol == "H2CO_303_202":
        cube_file = './APEX_data/APEX_H2CO_303_202_bl.fits'

    cube = SpectralCube.read(cube_file)
    cube_header = cube.header.copy()
    cube_header.update(mywcs.to_header())
    cube_header['NAXIS1'] = header['NAXIS1']
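
Overwriting NAXIS keywords by hand, as above, is fragile. A hedged sketch of matching a cube-derived moment map to the column-density grid using spectral_cube together with the reproject package (same file paths as above; reproject is an extra dependency assumed here):

from astropy.io import fits
from spectral_cube import SpectralCube
from reproject import reproject_interp

cube = SpectralCube.read('./APEX_data/APEX_C18O_2014_merge.fits')
mom0 = cube.moment0()  # 2D integrated-intensity map with its own WCS

target_header = fits.getheader(
    "./../Data/higal_data/column_properunits_conv36_source_only.fits")
# regrid the moment map onto the column-density pixel grid
mom0_regrid, footprint = reproject_interp(mom0.hdu, target_header)
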
Example no. 12
def main(fits_model_root,
         skymodel,
         ref_freq='60e6',
         fits_mask=None,
         min_peak_flux_jy=0.001,
         max_residual_jy=0.00,
         interp='linear'):
    """
    Make a makesourcedb sky model for input MS from WSClean fits model images

    Parameters
    ----------
    fits_model_root : str
        Root name of WSClean fits model files (without the "-XXXX-model.fits" part)
    skymodel : str
        Filename of the output makesourcedb sky model
    ref_freq : float, optional
        Reference freq of the output catalogue in Hz
    fits_mask : str, optional
        Filename of fits mask
    min_peak_flux_jy : float, optional
        Minimum absolute value of flux in Jy of a source in lowest-frequency model image
        to include in output model
    max_residual_jy : float, optional
        Maximum acceptable total residual absolute flux in Jy
    interp : str, optional
        Interpolation method. Can be any supported by scipy.interpolate.interp1d:
            'linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic'

    """
    min_peak_flux_jy = float(min_peak_flux_jy)
    max_residual_jy = float(max_residual_jy)
    ref_freq = float(ref_freq)

    if type(fits_mask) is str:
        if fits_mask.lower() == 'none':
            fits_mask = None

    # Find model images: look first for channel images and MFS image
    fits_models = glob.glob(fits_model_root + '-*-model.fits')
    if len(fits_models) > 0:
        # Get the MFS image
        mfs_model = fits_model_root + '-MFS-model.fits'
    else:
        # No channels images found, so look for non-MFS images
        fits_models = glob.glob(fits_model_root + '-model.fits')
        mfs_model = None
    if len(fits_models) == 0:
        print('ERROR: no model images found')
        sys.exit(1)

    # Read in model images
    freqs = []
    model_images = []
    for f in fits_models:
        # Get the frequency info
        hdr = fits.getheader(f, 0, ignore_missing_end=True)
        freqs.append(hdr['CRVAL3'])  # Hz
        model_images.append(fits.getdata(f, 0, ignore_missing_end=True))
    w = wcs.WCS(hdr)

    # Read in MFS image
    if mfs_model is None:
        mfs_model = fits_models[0]
    mfs_image = fits.getdata(mfs_model, 0, ignore_missing_end=True)

    # Sort by freq
    sorted_ind = np.argsort(freqs)
    freqs = np.array(freqs)[sorted_ind]
    fits_models = np.array(fits_models)[sorted_ind]
    model_images = np.array(model_images)[sorted_ind]

    # Find pixels that meet the flux cut (and are in the mask, if given)
    if fits_mask is not None:
        if fits_mask.lower() == 'empty':
            # Handle case in which no sources were found during masking
            nonzero_ind = [[], []]
        else:
            mask = fits.getdata(fits_mask, 0, ignore_missing_end=True)
            nonzero_ind = np.where((np.abs(mfs_image) > min_peak_flux_jy)
                                   & (mask > 0))
    else:
        nonzero_ind = np.where(np.abs(mfs_image) > min_peak_flux_jy)

    # Interpolate the fluxes to the frequency of the MS
    nsources = len(nonzero_ind[0])
    fluxes = []
    names = []
    ras = []
    decs = []
    for i in range(nsources):
        index = [nonzero_ind[j][i] for j in range(4)]
        index.reverse()  # change to WCS coords
        ras.append(
            w.wcs_pix2world(np.array([index]), 0, ra_dec_order=True)[0][0])
        decs.append(
            w.wcs_pix2world(np.array([index]), 0, ra_dec_order=True)[0][1])
        names.append('cc{}'.format(i))
        index.reverse()  # change back to image coords
        flux_array = np.array([im[tuple(index)] for im in model_images])

        # If MS frequency lies outside range, just use nearest freq
        if ref_freq < freqs[0]:
            flux = flux_array[0]
        elif ref_freq > freqs[-1]:
            flux = flux_array[-1]
        else:
            # Otherwise interpolate
            flux = scipy.interpolate.interp1d(freqs, flux_array,
                                              kind=interp)(ref_freq)
        fluxes.append(flux)

    # Remove sources until we reach the desired residual
    if len(fluxes) > 0:
        total_flux = np.sum(np.abs(fluxes))
        keep_ind = np.where(np.abs(fluxes) > min_peak_flux_jy)
        while (total_flux -
               np.sum(np.abs(np.array(fluxes)[keep_ind]))) < max_residual_jy:
            min_peak_flux_jy *= 1.1
            keep_ind = np.where(np.abs(fluxes) > min_peak_flux_jy)
            if len(keep_ind[0]) < 50:
                # keep up to 50 sources regardless of the residual
                break
        fluxes = np.array(fluxes)[keep_ind]
        ras = np.array(ras)[keep_ind]
        decs = np.array(decs)[keep_ind]
        names = np.array(names)[keep_ind]

    # Write sky model
    with open(skymodel, 'w') as outfile:
        outfile.write(
            'FORMAT = Name, Type, Ra, Dec, I, Q, U, V, ReferenceFrequency\n')
        for name, ra, dec, flux in zip(names, ras, decs, fluxes):
            ra_str, dec_str = convert_radec_str(ra, dec)
            outfile.write(
                '{0}, POINT, {1}, {2}, {3}, 0.0, 0.0, 0.0, {4}\n'.format(
                    name, ra_str, dec_str, flux, ref_freq))
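
The per-source frequency interpolation above reduces to a single scipy.interpolate.interp1d call bracketed by nearest-neighbour clamping. A tiny standalone sketch with made-up channel fluxes:

import numpy as np
import scipy.interpolate

freqs = np.array([120e6, 130e6, 140e6, 150e6])   # channel frequencies in Hz
flux_array = np.array([1.20, 1.10, 1.02, 0.95])  # model flux per channel in Jy

ref_freq = 135e6
if ref_freq < freqs[0]:
    flux = flux_array[0]
elif ref_freq > freqs[-1]:
    flux = flux_array[-1]
else:
    flux = scipy.interpolate.interp1d(freqs, flux_array, kind='linear')(ref_freq)
print(flux)  # 1.06 Jy for these made-up values
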
Example no. 13
imlist = [init,sc1,sc2,sc3,sc4,sc5,sc5tt,sc6tt]
residlist = [rinit,rsc1,rsc2,rsc3,rsc4,rsc5,rsc5tt,rsc6tt]

for name, ((ra1,dec1),(ra2,dec2)),(vmin,vmax) in [
         ('SgrB2M', ((ra1m,dec1m),(ra2m,dec2m)), [-0.001, 0.1]),
         ('SgrB2S', ((ra1s,dec1s),(ra2s,dec2s)), [-0.001, 0.05]),]:

    fig = pl.figure(1, figsize=(20,6), dpi=75)
    fig.clf()

    for ii, (fh,rfh) in enumerate(zip(imlist,
                                      residlist)
                                 ):

        print(ii,fh,rfh)
        mywcs = wcs.WCS(fh[0].header)
        center = coordinates.SkyCoord((ra1+ra2)/2, (dec1+dec2)/2, frame='fk5',
                                      unit=(u.deg, u.deg))
        size = max([np.abs(ra2-center.ra.deg), np.abs(dec2-center.dec.deg)]) * 2.1 * u.deg
        cutout_im = Cutout2D(fh[0].data, position=center, size=size, wcs=mywcs)
        cutout_res = Cutout2D(rfh[0].data, position=center, size=size,
                              wcs=mywcs)

        ax = fig.add_subplot(2,len(imlist),ii+1, projection=cutout_im.wcs)
        im = ax.imshow(cutout_im.data*1e3, cmap='gray',
                       norm=astropy.visualization.simple_norm(fh[0].data,
                                                              stretch='asinh',
                                                              min_cut=vmin*1e3,
                                                              max_cut=vmax*1e3,
                                                              asinh_a=0.001),
                       transform=ax.get_transform(cutout_im.wcs),
Example no. 14
def draw_rectangles(img,
                    catalog,
                    colnames=['x', 'y'],
                    header=None,
                    ax=None,
                    rectangle_size=[30, 30],
                    pixel_scale=0.168,
                    color='r',
                    **kwargs):
    """
    Draw rectangles on an image according to a catalogue. 

    Parameters:
        img (numpy 2-D array): Image itself.
        catalog (``astropy.table.Table`` object): A catalog which contains positions.
        colnames (list): List of string, indicating which columns correspond to positions. 
            It can also be "ra" and "dec", but then "header" is needed.
        header: Header file of a FITS image containing WCS information, typically ``astropy.io.fits.header`` object.  
        ax (``matplotlib.pyplot.axes`` object): The user could provide axes on which the figure will be drawn.
        rectangle_size (list of floats): Size of rectangles, in pixel.
        pixel_scale (float): Pixel size, in arcsec/pixel. Needed for correct scale bar.
        color (str): Color of rectangles.
        **kwargs: other arguments of ``display_single``. 

    Returns:
        ax: If the input ``ax`` is not ``None``.

    """
    if ax is None:
        fig = plt.figure(figsize=(12, 12))
        fig.subplots_adjust(left=0.0,
                            right=1.0,
                            bottom=0.0,
                            top=1.0,
                            wspace=0.00,
                            hspace=0.00)
        gs = gridspec.GridSpec(2, 2)
        gs.update(wspace=0.0, hspace=0.00)
        ax1 = fig.add_subplot(gs[0])
    else:
        ax1 = ax

    # ax1.yaxis.set_major_formatter(NullFormatter())
    # ax1.xaxis.set_major_formatter(NullFormatter())
    # ax1.axis('off')

    from matplotlib.patches import Rectangle
    if np.any([item.lower() == 'ra' for item in colnames]):
        if header is None:
            raise ValueError(
                '# Header containing WCS must be provided to convert sky coordinates into image coordinates.'
            )
            return
        else:
            w = wcs.WCS(header)
            x, y = w.wcs_world2pix(
                Table(catalog)[colnames[0]].data.data,
                Table(catalog)[colnames[1]].data.data, 0)
    else:
        x, y = catalog[colnames[0]], catalog[colnames[1]]
    display_single(img, ax=ax1, pixel_scale=pixel_scale, **kwargs)
    for i in range(len(catalog)):
        e = Rectangle(xy=(x[i] - rectangle_size[0] // 2,
                          y[i] - rectangle_size[1] // 2),
                      width=rectangle_size[0],
                      height=rectangle_size[1],
                      angle=0)
        e.set_facecolor('none')
        e.set_edgecolor(color)
        e.set_alpha(0.7)
        e.set_linewidth(1.3)
        ax1.add_artist(e)
    if ax is not None:
        return ax
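
The centring arithmetic used for the rectangles can be checked in isolation. A minimal matplotlib sketch that draws one rectangle of a given pixel size centred on a point (values are arbitrary):

import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle

x, y = 50.0, 60.0          # source position in pixels
rectangle_size = [30, 20]  # width, height in pixels

fig, ax = plt.subplots()
ax.set_xlim(0, 100)
ax.set_ylim(0, 100)
# anchor at the lower-left corner so the patch is centred on (x, y)
rect = Rectangle((x - rectangle_size[0] / 2, y - rectangle_size[1] / 2),
                 width=rectangle_size[0], height=rectangle_size[1],
                 fc='none', ec='r', lw=1.3, alpha=0.7)
ax.add_patch(rect)
plt.savefig('rectangle_check.png')
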
Example no. 15
f.close()
print(ra, dec, field, ccd_num)

path = '/fred/oz100/pipes/DWF_PIPE/MARY_WORK/' + field + '_18060*_mrt1_*/ccd' + ccd_num + '/images_resampled/sci_*.resamp.fits'

print(path)
path_insidefield = []

fitsfileslist = glob.glob(path)
#print(fitsfileslist)
mydic = {}

for path in fitsfileslist:
    hdulist = fits.open(path)
    w = wcs.WCS(hdulist[0].header)
    head = hdulist[0].header
    print(head)
    xlim = head['NAXIS1']
    ylim = head['NAXIS2']
    date = dt.datetime.strptime(head['DATE'], '%Y-%m-%dT%H:%M:%S')

    world_crd = np.array([[ra, dec]], np.float64)
    print(world_crd)

    pix_crd = w.wcs_world2pix(world_crd, 1)
    pixx, pixy = pix_crd[0][0], pix_crd[0][1]
    print(pixx, pixy)

    if pixy < ylim and pixy > 0 and pixx < xlim and pixx > 0:
        path_insidefield.append(path)
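
Recent astropy versions offer a more direct footprint test than converting to pixel coordinates by hand. A hedged sketch (assuming astropy >= 3.1, a 2D celestial header, and an illustrative file name):

from astropy.io import fits
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
import astropy.units as u

hdulist = fits.open('sci_example.resamp.fits')       # hypothetical file name
w = WCS(hdulist[0].header)

coord = SkyCoord(ra=150.0 * u.deg, dec=2.2 * u.deg)  # example target position
inside = w.footprint_contains(coord)                 # True if within the frame
hdulist.close()
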
Example no. 16
def fits_XY(image_path,RA,DEC):
    img, hdr = fits.getdata(image_path,header=True)
    img = img.astype(np.float64)
    w = wcs.WCS(hdr)
    RA_px, DEC_px = w.all_world2pix(RA,DEC,0)
    return RA_px, DEC_px, img, hdr
Example no. 17
    def __init__(self, image, ext, dq_bits=0, dqimage=None, dqext=None,
                 usermask=None, usermask_ext=None):
        """
        Parameters
        ----------
        image: ImageRef
            An :py:class:`~stsci.skypac.utils.ImageRef` object that refers
            to an open FITS file

        ext: tuple, int, str
            Extension specification in the `image` the `SkyLineMember`
            object will be associated with.

            An int `ext` specifies extension number. A tuple in the form
            (str, int) specifies extension name and number. A string `ext`
            specifies extension name and the extension version is assumed
            to be 1. See documentation for `astropy.io.fits.getdata`
            for examples.

        dq_bits: int, None (Default = 0)
            Integer sum of all the DQ bit values from the
            input `image`'s DQ array that should be considered "good"
            when building masks for sky computations. For example,
            if pixels in the DQ array can be combinations of 1, 2, 4,
            and 8 flags and one wants to consider DQ "defects" having
            flags 2 and 4 as being acceptable for sky computations,
            then `dq_bits` should be set to 2+4=6. Then a DQ pixel
            having values 2,4, or 6 will be considered a good pixel,
            while a DQ pixel with a value, e.g., 1+2=3, 4+8=12, etc.
            will be flagged as a "bad" pixel.

            | Default value (0) will make *all* non-zero
              pixels in the DQ mask to be considered "bad" pixels,
              and the corresponding image pixels will not be used
              for sky computations.

            | Set `dq_bits` to `None` to turn off the use of
              image's DQ array for sky computations.

            .. note::
                DQ masks (if used), *will be* combined with user masks
                specified by the `usermask` parameter.

        dqimage: ImageRef
            An :py:class:`~stsci.skypac.utils.ImageRef` object that refers
            to an open FITS file that has DQ data of the input `image`.

            .. note::
               When DQ data are located in the same FITS file as the
               science image data (e.g., HST/ACS, HST/WFC3, etc.),
               `dqimage` may point to the
               same :py:class:`~stsci.skypac.utils.ImageRef` object.
               In this case the reference count of the
               :py:class:`~stsci.skypac.utils.ImageRef` object must be
               increased adequately.

        dqext: tuple, int, str
            Extension specification of the `dqimage` that contains
            `image`'s DQ information. See help for `ext` for more
            details on acceptable formats for this parameter.

        usermask: ImageRef
            An :py:class:`~stsci.skypac.utils.ImageRef` object that refers
            to an open FITS file that has user mask data that indicate
            what pixels in the input `image` should be used for sky
            computations (``1``) and which pixels should **not** be used
            for sky computations (``0``).

        usermask_ext: tuple, int, str
            Extension specification of the `usermask` mask file that
            contains user's mask data that should be associated with
            the input `image` and `ext`. See help for `ext` for more
            details on acceptable formats for this parameter.

        """
        assert(hasattr(self.__class__, '_initialized') and
               self.__class__._initialized)
        self._reset()

        # check that input images and extensions are valid --
        # either integers or tuples of strings and integers, e.g., ('sci',1):
        _check_valid_imgext(image, 'image', ext, 'ext', can_img_be_None=False)
        if dq_bits is not None:
            if dqimage is None:
                dq_bits = 0
            else:
                _check_valid_imgext(dqimage, 'dqimage', dqext, 'dqext')
        _check_valid_imgext(usermask, 'usermask', usermask_ext, 'usermask_ext')

        # get telescope, instrument, and detector info:
        self.telescope, self.instrument, self.detector = get_instrument_info(
            image, ext)

        # check dq_bits:
        if dq_bits is not None and not isinstance(dq_bits, int):
            if image:
                image.release()
            if usermask:
                usermask.release()
            if dqimage:
                dqimage.release()
            raise TypeError(
                "Argument 'dq_bits' must be either an integer or None."
            )

        # build mask:
        self._buildMask(image.original_fname, ext, dq_bits,
                        dqimage, dqext, usermask, usermask_ext)
        if dqimage:
            dqimage.release()
        if usermask:
            usermask.release()

        # save file, user mask, and DQ extension info:
        self._fname = image.original_fname
        self._basefname = basename(self._fname)
        self._image = image
        self._ext = ext
        self._can_free_image = (image.can_reload_data and
                                self.optimize != 'speed')

        # check extension and create a string representation:
        try:
            extstr = ext2str(ext)
        except ValueError:
            raise ValueError("Unexpected extension type '{}' for file {}.".
                             format(ext, self._basefname))

        self._id = "{:s}[{:s}]".format(self._basefname, extstr)

        # extract WCS for bounding-box computation
        try:
            if hasattr(image.hdu[ext], 'wcs'):
                self._wcs = image.hdu[ext].wcs
            else:
                if self.telescope in supported_telescopes:
                    self._wcs = wcsutil.HSTWCS(image.hdu, ext)
                else:
                    self._wcs = pywcs.WCS(image.hdu[ext].header, image.hdu)
            if self._wcs is None:
                raise Exception("Invalid WCS.")

        except Exception as e:
            msg = "Unable to obtain WCS information for the file {:s}." \
                .format(self._id)
            self._ml.error(msg)
            self._ml.flush()
            self._release_all()
            raise e

        # determine pixel scale:
        self._get_pixel_scale()

        # see if image data are in counts or count-rate
        # and compute count(-rate) to flux (per arcsec^2) conversion factor:
        self._brightness_conv_from_hdu(image.hdu, self._idcscale)

        # process Sky user's keyword and its value:
        self._init_skyuser(image.hdu[ext].header)

        # Set polygon to be the bounding box of the chip:
        self._polygon = SphericalPolygon.from_wcs(self.wcs, steps=1)
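
The dq_bits convention described in the docstring (an integer sum of the bit flags considered harmless) boils down to a single bitwise test. A small numpy sketch for dq_bits = 2 + 4, matching the worked example in the docstring:

import numpy as np

dq = np.array([0, 1, 2, 3, 4, 6, 8, 12])  # example DQ pixel values
dq_bits = 2 + 4                           # flags 2 and 4 are acceptable

# a pixel is "good" if it carries no flags outside the accepted set
good = (dq & ~dq_bits) == 0
print(dict(zip(dq.tolist(), good.tolist())))
# {0: True, 1: False, 2: True, 3: False, 4: True, 6: True, 8: False, 12: False}
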
Example no. 18
def main(input_image_file,
         vertices_file,
         output_image_file,
         blank_value='zero',
         image_is_wsclean_model=False):
    """
    Blank a region in an image

    Parameters
    ----------
    input_image_file : str
        Filename of input image to blank
    vertices_file : str, optional
        Filename of file with vertices (must be a pickle file containing
        a dictionary with the vertices in the 'vertices' entry)
    output_image_file : str
        Filename of output image
    blank_value : str, optional
        Value for blanks (one of 'zero' or 'nan')
    image_is_wsclean_model : bool, optional
        If True, the input and output image files are treated as the root name
        of a WSClean model image (or images)

    """
    if type(image_is_wsclean_model) is str:
        if image_is_wsclean_model.lower() == 'true':
            image_is_wsclean_model = True
        else:
            image_is_wsclean_model = False

    if image_is_wsclean_model:
        input_image_files = glob.glob(input_image_file + '*-model.fits')
        output_image_files = [
            f.replace(input_image_file, output_image_file)
            for f in input_image_files
        ]
    else:
        input_image_files = [input_image_file]
        output_image_files = [output_image_file]

    if blank_value == 'zero':
        blank_val = 0.0
    elif blank_value == 'nan':
        blank_val = np.nan
    else:
        print('Blank value type "{}" not understood.'.format(blank_value))
        sys.exit(1)

    # Construct polygon of facet region
    header = pyfits.getheader(input_image_files[0], 0)
    w = wcs.WCS(header)
    RAind = w.axis_type_names.index('RA')
    Decind = w.axis_type_names.index('DEC')
    vertices = read_vertices(vertices_file)
    RAverts = vertices[0]
    Decverts = vertices[1]
    xvert = []
    yvert = []
    for RAvert, Decvert in zip(RAverts, Decverts):
        ra_dec = np.array([[0.0, 0.0, 0.0, 0.0]])
        ra_dec[0][RAind] = RAvert
        ra_dec[0][Decind] = Decvert
        xvert.append(w.wcs_world2pix(ra_dec, 0)[0][Decind])
        yvert.append(w.wcs_world2pix(ra_dec, 0)[0][RAind])
    poly = Polygon(xvert, yvert)

    for input_image, output_image in zip(input_image_files,
                                         output_image_files):
        hdu = pyfits.open(input_image, memmap=False)
        data = hdu[0].data

        # Find limits of facet poly and blank pixels outside them
        xmin = max(int(np.min(xvert)) - 2, 0)
        xmax = min(int(np.max(xvert)) + 2, data.shape[2])
        ymin = max(int(np.min(yvert)) - 2, 0)
        ymax = min(int(np.max(yvert)) + 2, data.shape[3])
        data[0, 0, :, :ymin] = blank_val
        data[0, 0, :, ymax:] = blank_val
        data[0, 0, :xmin, :] = blank_val
        data[0, 0, xmax:, :] = blank_val

        # Find distance to nearest poly edge and blank those that
        # are outside the facet (dist < 0)
        pix_ind = np.indices((xmax - xmin, ymax - ymin))
        pix_ind[0] += xmin
        pix_ind[1] += ymin
        dist = poly.is_inside(pix_ind[0], pix_ind[1])
        outside_ind = np.where(dist < 0.0)
        if len(outside_ind[0]) > 0:
            data[0, 0, pix_ind[0][outside_ind],
                 pix_ind[1][outside_ind]] = blank_val

        hdu[0].data = data
        hdu.writeto(output_image, clobber=True)
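
The inside/outside test above depends on an external Polygon class with an is_inside method. A hedged equivalent of the containment step using matplotlib.path.Path on a small synthetic grid:

import numpy as np
from matplotlib.path import Path

# facet vertices in pixel coordinates, analogous to xvert/yvert above
xvert = [10.0, 90.0, 90.0, 10.0]
yvert = [10.0, 10.0, 80.0, 80.0]
poly = Path(np.column_stack([xvert, yvert]))

yy, xx = np.indices((100, 100))
points = np.column_stack([xx.ravel(), yy.ravel()])
inside = poly.contains_points(points).reshape(xx.shape)

data = np.random.rand(100, 100)
data[~inside] = 0.0  # blank everything outside the facet
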
Example no. 19
def make_data_cube(
    detectid=None,
    coords=None,
    shotid=None,
    pixscale=0.25 * u.arcsec,
    imsize=30.0 * u.arcsec,
    wave_range=[3470, 5540],
    dwave=2.0,
    dcont=50.0,
    convolve_image=True,
    ffsky=True,
    subcont=False,
):

    """
    Function to make a datacube from either a detectid or from a
    coordinate/shotid combination.
    
    Parameters
    ----------
    detectid: int
        detectid from the continuum or lines catalog. Default is
        None. Provide a coords/shotid combo if this isn't given
    coords: SkyCoords object
        coordinates to define the centre of the data cube
    pixscale: astropy angle quantity
        plate scale
    imsize: astropy angle quantity
        spatial length of cube (equal dims is only option)
    wave_range: list
        start and stop value for the wavelength range in Angstrom
    dwave: float
        step in wavelength range in Angstrom
    convolve_image: bool
         option to convolve image with shotid seeing
    ffsky: bool
        option to use full frame calibrated fibers. Default is
        True.
    subcont: bool
        option to subtract continuum. Default is False. This
        will measure the continuum 50AA below and above the
        input wave_range
    dcont: float
        width in Angstrom of the regions used to measure the continuum.
        Default is to measure 50 AA wide regions on either side of the line

    Returns
    -------
    hdu: PrimaryHDU object
        the data cube 3D array and associated 3d header
        Units are '10^-17 erg cm-2 s-1 per spaxel'

    Examples
    --------

    Can either pass in a detectid:

    >>> detectid_obj=2101602788
    >>> hdu = make_data_cube( detectid=detectid_obj)
    >>> hdu.writeto( str(detectid_obj) + '.fits', overwrite=True)

    or can put in an SkyCoord object:

    >>> star_coords = SkyCoord(9.625181, -0.043587, unit='deg')
    >>> hdu = make_data_cube( coords=star_coords[0], shotid=20171016108, dwave=2.0)
    >>> hdu.writeto( 'star.fits', overwrite=True)
    
    """
    global config, detecth5, surveyh5

    if detectid is not None:
        detectid_obj = detectid
        det_info = detecth5.root.Detections.read_where("detectid == detectid_obj")[0]
        shotid = det_info["shotid"]
        coords = SkyCoord(det_info["ra"], det_info["dec"], unit="deg")

    if coords is None or shotid is None:
        print("Provide a detectid or both a coords and shotid")

    E = Extract()
    E.load_shot(shotid)

    # get spatial dims:
    ndim = int(imsize / pixscale)
    center = int(ndim / 2)

    # get wave dims:
    nwave = int((wave_range[1] - wave_range[0]) / dwave + 1)

    w = wcs.WCS(naxis=3)
    w.wcs.crval = [coords.ra.deg, coords.dec.deg, wave_range[0]]
    w.wcs.crpix = [center, center, 1]
    w.wcs.ctype = ["RA---TAN", "DEC--TAN", "WAVE"]
    w.wcs.cdelt = [-pixscale.to(u.deg).value, pixscale.to(u.deg).value, dwave]
    
    rad = imsize
    info_result = E.get_fiberinfo_for_coord(coords, radius=rad, ffsky=ffsky)
    ifux, ifuy, xc, yc, ra, dec, data, error, mask = info_result

    # get ifu center:
    ifux_cen, ifuy_cen = E.convert_radec_to_ifux_ifuy(
        ifux, ifuy, ra, dec, coords.ra.deg, coords.dec.deg
    )

    if convolve_image:
        surveyh5 = tb.open_file(config.surveyh5, "r")
        shotid_obj = shotid
        fwhm = surveyh5.root.Survey.read_where("shotid == shotid_obj")["fwhm_virus"][0]
        surveyh5.close()
    else:
        fwhm = 1.8  # just a dummy variable as convolve_image=False

    im_cube = np.zeros((nwave, ndim, ndim))

    wave_i = wave_range[0]
    i = 0

    while wave_i <= wave_range[1]:
        try:
            im_src = E.make_narrowband_image(
                ifux_cen,
                ifuy_cen,
                ifux,
                ifuy,
                data,
                mask,
                scale=pixscale.to(u.arcsec).value,
                wrange=[wave_i, wave_i + dwave],
                nchunks=1,
                seeing_fac=fwhm,
                convolve_image=convolve_image,
                boxsize=imsize.to(u.arcsec).value,
            )

            im_slice = im_src[0]

            if subcont:
                zarray_blue = E.make_narrowband_image(
                    ifux_cen,
                    ifuy_cen,
                    ifux,
                    ifuy,
                    data,
                    mask,
                    seeing_fac=fwhm,
                    scale=pixscale.to(u.arcsec).value,
                    boxsize=imsize.to(u.arcsec).value,
                    nchunks=2,
                    wrange=[wave_i-dcont, wave_i],
                    convolve_image=convolve_image,
                )
                zarray_red = E.make_narrowband_image(
                    ifux_cen,
                    ifuy_cen,
                    ifux,
                    ifuy,
                    data,
                    mask,
                    seeing_fac=fwhm,
                    nchunks=2,
                    scale=pixscale.to(u.arcsec).value,
                    boxsize=imsize.to(u.arcsec).value,
                    wrange=[wave_i + dwave, wave_i + dwave + dcont],
                    convolve_image=convolve_image,
                )

                # estimate the continuum per Angstrom from the mean of the two
                # side bands (each dcont wide), then subtract its contribution
                # over this dwave-wide slice
                im_cont = (zarray_blue[0] + zarray_red[0])/(2*dcont)
                im_slice = im_src[0] - dwave*im_cont
    
            im_cube[i, :, :] = im_slice

        except Exception:
            im_cube[i, :, :] = np.zeros((ndim, ndim))
        wave_i += dwave
        i += 1

    hdu = fits.PrimaryHDU(im_cube, header=w.to_header())

    E.close()

    return hdu
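# Usage sketch (not from the original source): the subcont/dcont continuum
# subtraction described in the docstring is not covered by the examples above;
# it can be exercised like this, assuming the same make_data_cube API.
detectid_obj = 2101602788
hdu_sub = make_data_cube(detectid=detectid_obj, subcont=True, dcont=50.0)
hdu_sub.writeto(str(detectid_obj) + '_contsub.fits', overwrite=True)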
Esempio n. 20
0
def statmorphWrapper(index_pairs):

    crash = ['NGVSJ12:29:48.87+13:25:46.0', 'NGVSJ12:30:49.42+12:23:28.0']
    
    df = pd.read_csv('NGVSgalaxies.csv')

    # Iterate through rows in csv file containing measurements for each galaxy
    for row in df.iloc[index_pairs[0]:index_pairs[1]].itertuples(index=True, name='Pandas'):
        galaxy = row.Official_name
        base = 'https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/files/vault/ngvs/data/NGVS/galaxies/'
        galaxyPath = f'{galaxy}/{galaxy}_G'

        if galaxy in crash:
            continue

        startcopy = time.time()
        # Try to copy galaxy files
        os.system(f'vcp vos:ngvs/data/NGVS/galaxies/{galaxy}/{galaxy}_G.fits /mnt/scratch/temp_galaxy_storage')
        os.system(f'vcp vos:ngvs/data/NGVS/galaxies/{galaxyPath}_iso_model.fits /mnt/scratch/temp_galaxy_storage')
        os.system(f'vcp vos:ngvs/data/NGVS/galaxies/{galaxyPath}_galfit_model.fits /mnt/scratch/temp_galaxy_storage')
        os.system(f'vcp vos:ngvs/data/NGVS/galaxies/{galaxyPath}_mask.fits /mnt/scratch/temp_galaxy_storage')
        os.system(f'vcp vos:ngvs/data/NGVS/galaxies/{galaxyPath}_psf.fits /mnt/scratch/temp_galaxy_storage')
        os.system(f'vcp vos:ngvs/data/NGVS/galaxies/{galaxyPath}_sig.fits /mnt/scratch/temp_galaxy_storage')
        os.system(f'vcp vos:ngvs/data/NGVS/galaxies/{galaxyPath}_iso_residual.fits /mnt/scratch/temp_galaxy_storage')
        os.system(f'vcp vos:ngvs/data/NGVS/galaxies/{galaxyPath}_galfit_residual.fits /mnt/scratch/temp_galaxy_storage')
        endcopy = time.time() - startcopy

        # If one of the required files is missing, continue to next galaxy                                                           
        if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G.fits') or (not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_iso_model.fits') and not \
        path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_galfit_model.fits')) or not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_mask.fits') or not \
        path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_psf.fits') or not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_sig.fits') or (not \
        path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_iso_residual.fits') and not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_galfit_residual.fits'))):
            print(f'missing {galaxy}')
            writeFile = open(f'/mnt/scratch/missing/{galaxy}.txt', 'w')     
            writeFile.write(f'{galaxy}\n')
            if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G.fits')):
                writeFile.write('missing original\n')
            if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_iso_model.fits')):
                writeFile.write('missing iso model\n')
            if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_galfit_model.fits')):
                writeFile.write('missing galfit model\n')
            if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_mask.fits')):
                writeFile.write('missing mask\n')
            if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_psf.fits')):
                writeFile.write('missing psf\n')
            if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_sig.fits')):
                writeFile.write('missing sig\n')
            if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_iso_residual.fits')):
                writeFile.write('missing iso residual\n')
            if(not path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_galfit_residual.fits')):
                writeFile.write('missing galfit residual\n')
            
            writeFile.close()
            clearTempFiles(galaxy)
            continue

        # ONLY PROCESS LARGE FILE
        #---------------------------------------------------------------------------------------
        if(os.path.getsize(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G.fits') < 300000000):
            clearTempFiles(galaxy)
            continue
        #---------------------------------------------------------------------------------------

        # Create check file for current galaxy
        print(f'checking {galaxy}')
        writeFile = open(f'/mnt/scratch/check/{galaxy}.txt', 'w') 
        writeFile.write('checking')
        writeFile.close()


        # If any of the galaxy files are empty then create galaxy corrupt file and continue to next galaxy
        if(checkCorrupt(galaxy)):
            writeFile = open(f'/mnt/scratch/corrupt/{galaxy}.txt', 'w')     
            writeFile.write(f'corrupt\n')
            writeFile.close()
            clearTempFiles(galaxy)
            continue

        # Beginning of segmentation map creation

        startseg = time.time()

        hdu = fits.open(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G.fits')
        im_header = hdu[0].header
        im_data = hdu[0].data

         # Sky subtraction from original image
        sky_data = np.zeros(np.shape(im_data))
        sky_data += row.SKY
        im_sky_subtracted = im_data - sky_data

        # Calculate nucleus center
        ra = row.NGVS_ra
        dec = row.NGVS_dec
        mywcs = wcs.WCS(im_header)
        xCenter, yCenter = mywcs.all_world2pix([[ra, dec]], 0)[0]

        mask_data = fits.getdata(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_mask.fits')

        # If a iso model exists then use that file for the original model data, otherwise use galfit
        if path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_iso_model.fits'):
            original_model_data = fits.getdata(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_iso_model.fits')
        else:
            original_model_data = fits.getdata(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_galfit_model.fits')

        # If nucleus exists then mask nucleus
        if(row.Nuc_Flag == 1):
            for i in range(len(mask_data)):
                for j in range(len(mask_data)):
                    # TODO: Change radius of nucleus mask
                    if(((i-xCenter)**2) + ((j-yCenter)**2) <= (5**2)):
                        mask_data[i][j] = 100
                    
        
        ellipse_data = np.zeros(np.shape(original_model_data))
        # Calculate median of original model values within 10 pixel radius from nucleus center
        pixelList = []
        for i in range(len(original_model_data)):
            for j in range(len(original_model_data)):
                    if(((i-xCenter)**2) + ((j-yCenter)**2) <= (10**2) and mask_data[i][j] != 100):
                        pixelList.append(original_model_data[i][j])   

        median = statistics.median(pixelList)

        # Create Segmentation Map
        seg_data = np.zeros(np.shape(original_model_data))
        ellipse_data = np.zeros(np.shape(original_model_data))
        
        # isEmpty flag is used for checking if a segmentation map is valid for processing. If segmentation map 2D list is all 0's then script crashes.
        isEmpty = True

        # if median is greater than 2*sky value then create segmentation map from original model values greater than 1.4*sky value within ellipse area
        if(median > 2*row.SKY):
            for i in range(len(original_model_data)):   
                for j in range(len(original_model_data)):
                        if(inEllipse(i,j,xCenter,yCenter,row.Size,row.AxisRatio,row.PA)):
                            ellipse_data[i][j] = 100
                            if(original_model_data[i][j] > (1.4*row.SKY)):
                                seg_data[i][j] = 100
                                isEmpty = False
        # If median is less than 2*sky value then create segmentation map from original model values greater than 1.1*sky value within ellipse area
        else:
            for i in range(len(original_model_data)):
                for j in range(len(original_model_data)):
                        if(inEllipse(i,j,xCenter,yCenter,row.Size,row.AxisRatio,row.PA)):
                            ellipse_data[i][j] = 100
                            if(original_model_data[i][j] > (1.1*row.SKY)):
                                seg_data[i][j] = 100
                                isEmpty = False

        psf = fits.getdata(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_psf.fits')
        weightmap = fits.getdata(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_sig.fits')
        mask_data = np.array(mask_data, dtype=bool)

        endseg = time.time() - startseg
 
        # End of segmentation map creation


        # If the galaxy's segmentation map is empty with no area of interest, then create empty galaxy file and continue to next galaxy
        if(isEmpty):
            writeFile = open(f'/mnt/scratch/emptyseg/{galaxy}.txt', 'w')     
            writeFile.write('empty')
            writeFile.close()
            clearTempFiles(galaxy)
            continue

        start_time = time.time()

        # run statmorph on current galaxy
        source_morphs = statmorph.source_morphology(im_sky_subtracted, seg_data, mask=mask_data, weightmap=weightmap, psf=psf)
        end_time = time.time() - start_time

        morph = source_morphs[0]

        startmodelcreate = time.time()

        # create model from statmorph results
        ny, nx = im_sky_subtracted.shape
        y, x = np.mgrid[0:ny, 0:nx]
        fitted_model = statmorph.ConvolvedSersic2D(
            amplitude=morph.sersic_amplitude,
            r_eff=morph.sersic_rhalf,
            n=morph.sersic_n,
            x_0=morph.sersic_xc,
            y_0=morph.sersic_yc,
            ellip=morph.sersic_ellip,
            theta=morph.sersic_theta)
        fitted_model.set_psf(psf)
        output_model_data = fitted_model(x, y)

        endmodelcreate = time.time() - startmodelcreate
        
        if path.isfile(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_iso_residual.fits'):
            original_res_data = fits.getdata(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_iso_residual.fits')
        else:
            original_res_data = fits.getdata(f'/mnt/scratch/temp_galaxy_storage/{galaxy}_G_galfit_residual.fits')


        startfig = time.time()

        # normalize images, models, segmentation map
        output_res_data = im_sky_subtracted - output_model_data
        p1 = 10. ; p2 = 90.

        im_p1 = np.percentile(im_sky_subtracted.ravel(), p1)
        im_p2 = np.percentile(im_sky_subtracted.ravel(), p2)
        normSky = ImageNormalize(im_sky_subtracted, vmin=im_p1, vmax=im_p2)

        im_p1 = np.percentile(output_model_data.ravel(), p1)
        im_p2 = np.percentile(output_model_data.ravel(), p2)
        normOutputMod = ImageNormalize(output_model_data, vmin=im_p1, vmax=im_p2)

        im_p1 = np.percentile(original_model_data.ravel(), p1)
        im_p2 = np.percentile(original_model_data.ravel(), p2)
        normOriginalMod = ImageNormalize(original_model_data, vmin=im_p1, vmax=im_p2)

        im_p1 = np.percentile(output_res_data.ravel(), p1)
        im_p2 = np.percentile(output_res_data.ravel(), p2)
        normOutputRes = ImageNormalize(output_res_data, vmin=im_p1, vmax=im_p2)

        im_p1 = np.percentile(original_res_data.ravel(), p1)
        im_p2 = np.percentile(original_res_data.ravel(), p2)
        normOriginalRes = ImageNormalize(original_res_data, vmin=im_p1, vmax=im_p2)

        # create figures for images, models, segmentation map
        gs = gridspec.GridSpec(2, 4, width_ratios=[1, 1, 1, 1],
         wspace=0.2, hspace=0, top=0.7, bottom=0.05, left=0.1, right=0.5)

        fig = plt.figure(figsize=(30,10))

        ax= plt.subplot(gs[0,0])
        ax.imshow(im_sky_subtracted, norm=normSky, cmap='gray', origin='lower')
        ax.set_title('Sky Subtracted Image', fontsize=15)

        ax= plt.subplot(gs[0,1])
        ax.imshow(original_model_data, norm=normOriginalMod, cmap='gray', origin='lower')
        ax.set_title('Original Model', fontsize=15)

        ax= plt.subplot(gs[0,2])
        ax.imshow(output_model_data, norm=normOutputMod, cmap='gray', origin='lower')
        ax.set_title('Output Model', fontsize=15)

        ax= plt.subplot(gs[0,3])
        ax.imshow(mask_data, cmap='gray', origin='lower')
        ax.set_title('Mask', fontsize=15)

        ax= plt.subplot(gs[1,0])
        ax.imshow(seg_data, cmap='gray', origin='lower')
        ax.set_title('Segmap', fontsize=15)

        ax= plt.subplot(gs[1,1])
        ax.imshow(ellipse_data, cmap='gray', origin='lower')
        ax.set_title('Ellipse Area', fontsize=15)

        ax= plt.subplot(gs[1,2])
        ax.imshow(original_res_data, norm=normOriginalRes, cmap='gray', origin='lower')
        ax.set_title('Original Residual', fontsize=15)

        ax= plt.subplot(gs[1,3])
        ax.imshow(output_res_data, norm=normOutputRes, cmap='gray', origin='lower')
        ax.set_title('Output Residual', fontsize=15)

        endfig = time.time() - startfig

        # save figures as PNG image to output directory
        fig.savefig(f'/mnt/scratch/output/{galaxy}_sourcemorph:{round(end_time, 2)}_seg={round(endseg, 2)}_RE={round(row.Size, 3)}_mag={round(row.principleg_mag_cg, 3)}.png', facecolor='w', edgecolor='w', transparent=False, bbox_inches='tight')
        plt.close(fig)

        
        # UNCOMMENT TO SAVE AS MULTI EXTENSION FITS FILE INSTEAD OF PNG 
        #------------------------------------------------------------------------------------------------------------------------ 
        # primary_hdu = fits.PrimaryHDU(im_sky_subtracted, header=im_header)
        # image_hdu = fits.ImageHDU(output_model_data)
        # image_hdu2 = fits.ImageHDU(output_res_data)
        # hdul = fits.HDUList([primary_hdu, image_hdu, image_hdu2])

        # upload fits file to VOSpace
        # hdul.writeto(f'/mnt/scratch/output/{galaxy}_output.fits', overwrite=True)
        # os.system(f'vcp /mnt/scratch/output/{galaxy}_output.fits vos:ngvs/data/STATMORPH/FITS_output/{galaxy}_output.fits')
        # if path.isfile(f'/mnt/scratch/output/{galaxy}_output.fits'):
        #    os.system(f'rm /mnt/scratch/output/{galaxy}_output.fits')
        #------------------------------------------------------------------------------------------------------------------------
        
        # UPLOAD PNG FILE WITH FLAGS
        # os.system(f'vcp /mnt/scratch/output/{galaxy}_time:{round(end_time, 2)}_Flag={morph.flag}_SersicFlag={morph.flag_sersic}.png \
        # vos:ngvs/data/STATMORPH/filesize_bug/{galaxy}_time:{round(end_time, 2)}_Flag={morph.flag}_SersicFlag={morph.flag_sersic}.png')
        # if path.isfile(f'/mnt/scratch/output/{galaxy}_time:{round(end_time, 2)}_Flag={morph.flag}_SersicFlag={morph.flag_sersic}.png'):
        #    os.system(f'rm /mnt/scratch/output/{galaxy}_time:{round(end_time, 2)}_Flag={morph.flag}_SersicFlag={morph.flag_sersic}.png')


        # UPLOAD PNG FILE WITH MEDIAN & SKY VALUES
        # os.system(f'vcp /mnt/scratch/output/{galaxy}_time:{round(end_time, 2)}_size={row.Size}_median={median}_2*sky={2*row.SKY}sky={row.SKY}.png \
        # vos:ngvs/data/STATMORPH/memory_bug/{galaxy}_time:{round(end_time, 2)}_size={row.Size}_median={median}_2*sky={2*row.SKY}sky={row.SKY}.png')
        # if path.isfile(f'/mnt/scratch/output/{galaxy}_time:{round(end_time, 2)}_size={row.Size}_median={median}_2*sky={2*row.SKY}sky={row.SKY}.png'):
        #    os.system(f'rm /mnt/scratch/output/{galaxy}_time:{round(end_time, 2)}_size={row.Size}_median={median}_2*sky={2*row.SKY}sky={row.SKY}.png')

        # UPLOAD PNG FILE WITH RUNNING TIMES & RE FACTOR & MAGNITUDE
        os.system(f'vcp /mnt/scratch/output/{galaxy}_sourcemorph:{round(end_time, 2)}_seg={round(endseg, 2)}_RE={round(row.Size, 3)}_mag={round(row.principleg_mag_cg, 3)}.png \
        vos:ngvs/data/STATMORPH/memory_fix/{galaxy}_sourcemorph:{round(end_time, 2)}_seg={round(endseg, 2)}_RE={round(row.Size, 3)}_mag={round(row.principleg_mag_cg, 3)}.png')
        if path.isfile(f'/mnt/scratch/output/{galaxy}_sourcemorph:{round(end_time, 2)}_seg={round(endseg, 2)}_RE={round(row.Size, 3)}_mag={round(row.principleg_mag_cg, 3)}.png'):
            os.system(f'rm /mnt/scratch/output/{galaxy}_sourcemorph:{round(end_time, 2)}_seg={round(endseg, 2)}_RE={round(row.Size, 3)}_mag={round(row.principleg_mag_cg, 3)}.png')

        hdu.close()
        clearTempFiles(galaxy)
        print(f'complete {galaxy}')
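# Usage sketch (not from the original source): statmorphWrapper takes a
# (start, stop) index pair into NGVSgalaxies.csv, so it can be run serially or
# farmed out in chunks. The chunking and multiprocessing shown here are an
# assumption about how the wrapper is driven, not part of the original code.
if __name__ == '__main__':
    import multiprocessing as mp
    import pandas as pd

    n_rows = len(pd.read_csv('NGVSgalaxies.csv'))
    chunk = 50
    pairs = [(i, min(i + chunk, n_rows)) for i in range(0, n_rows, chunk)]
    with mp.Pool(processes=4) as pool:
        pool.map(statmorphWrapper, pairs)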
Esempio n. 21
0
def add_calibration(data, imagestretch='linear'):
    """
    add calibration results to website
    """

    ### produce calibration plot for each frame
    for idx, cat in enumerate(data['catalogs']):
        if not data['zeropoints'][idx]['success']:
            continue
        ax1 = plt.subplot(211)
        ax1.set_title('%s: %s-band from %s' % 
                      (cat.catalogname, data['filtername'], 
                       data['ref_cat'].catalogname))
        ax1.set_xlabel('Number of Reference Stars')
        ax1.set_ylabel('Magnitude Zeropoint', fontdict={'color':'red'})
        #ax1.ticklabel_format(style='sci', axis='y', scilimits=(-5,5))

        zp_idx = data['zeropoints'][idx]['zp_idx']
        clipping_steps = data['zeropoints'][idx]['clipping_steps'] 
        
        x = [len(clipping_steps[i][3]) for i in range(len(clipping_steps))]

        ax1.errorbar(x, [clipping_steps[i][0] for i
                         in range(len(clipping_steps))],
                     yerr=[clipping_steps[i][1] for i
                           in range(len(clipping_steps))], color='red')
        ax1.set_ylim(ax1.get_ylim()[::-1]) # reverse y axis
        ax1.plot([len(clipping_steps[zp_idx][3]), 
                  len(clipping_steps[zp_idx][3])],
                 ax1.get_ylim(), color='black') 

        ax2 = ax1.twinx()
        ax2.plot(x, [clipping_steps[i][2] for i
                     in range(len(clipping_steps))],
                 color='blue')
        ax2.set_ylabel(r'reduced $\chi^2$', fontdict={'color':'blue'})
        ax2.set_yscale('log')
            
        # residual plot
        ax3 = plt.subplot(212)
        ax3.set_xlabel('Reference Star Magnitude')
        ax3.set_ylabel('Calibration-Reference (mag)')
            
        match = data['zeropoints'][idx]['match']
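        # match[0] holds the reference-catalog photometry and match[1] the
        # instrumental photometry; the residuals below are (instrumental mag +
        # fitted zeropoint) minus catalog mag for the stars kept at the chosen
        # clipping step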
        x             = match[0][0][clipping_steps[zp_idx][3]]
        residuals     = match[1][0][clipping_steps[zp_idx][3]] \
                        + clipping_steps[zp_idx][0] \
                        - match[0][0][clipping_steps[zp_idx][3]] 
        residuals_sig = numpy.sqrt(match[1][1][clipping_steps[zp_idx][3]]**2\
                                   + clipping_steps[zp_idx][1]**2)

        ax3.errorbar(x, residuals, yerr=residuals_sig, color='black',
                     linestyle='')
        ax3.plot(ax3.get_xlim(), [0,0], color='black', linestyle='--')
        ax3.set_ylim(ax3.get_ylim()[::-1]) # reverse y axis  

        plt.grid()
        plt.savefig(('.diagnostics/%s_photcal.png') % cat.catalogname,
                    format='png')
        data['zeropoints'][idx]['plotfilename'] = \
                                        ('.diagnostics/%s_photcal.png') % \
                                        cat.catalogname
        plt.close()

            
    ### create zeropoint overview plot
    times = [dat['obstime'][0] for dat in data['zeropoints']]
    zp    = [dat['zp'] for dat in data['zeropoints']]
    zperr = [dat['zp_sig'] for dat in data['zeropoints']]

    plt.subplot()
    plt.errorbar(times, zp, yerr=zperr, linestyle='')
    plt.xlabel('Observation Midtime (JD)')
    plt.ylabel('Magnitude Zeropoints (mag)')
    plt.show()
    plt.ylim([plt.ylim()[1], plt.ylim()[0]])
    plt.grid()
    plt.savefig('.diagnostics/zeropoints.png', format='png')
    plt.close()
    data['zpplot'] = 'zeropoints.png'


    ### create calibration website
    html  = "<H2>Calibration Results</H2>\n"
    html += ("<P>Calibration input: minimum number/fraction of reference " \
             + "stars %.2f, reference catalog: %s, filter name: %s\n") % \
        (data['minstars'], data['ref_cat'].catalogname, data['filtername'])
    html += "<TABLE BORDER=\"1\">\n<TR>\n"
    html += "<TH>Filename</TH><TH>Zeropoint (mag)</TH><TH>ZP_sigma (mag)</TH>" \
            + "<TH>N_stars</TH><TH>N_matched</TH>\n</TR>\n"
    for dat in data['zeropoints']:
        if 'plotfilename' in list(dat.keys()):
            html += ("<TR><TD><A HREF=\"#%s\">%s</A></TD>" \
                     + "<TD>%7.4f</TD><TD>%7.4f</TD><TD>%d</TD>" \
                     + "<TD>%d</TD>\n</TR>" ) % \
                (dat['plotfilename'].split('.diagnostics/')[1], 
                 dat['filename'], dat['zp'],
                 dat['zp_sig'], dat['zp_nstars'],
                 len(dat['match'][0][0]))
    html += "</TABLE>\n"
    html += "<P><IMG SRC=\"%s\">" % data['zpplot']
    for dat in data['zeropoints']:
        if not dat['success']:
            continue
        catframe = '.diagnostics/'+ \
                   dat['filename'][:dat['filename'].find('.ldac')] + \
                   '.fits_reference_stars.png'
        html += ("<H3>%s</H3>" \
                 + "<TABLE BORDER=\"0\">\n" \
                 + "<TR><TD><A HREF=\"%s\">" \
                 + "<IMG ID=\"%s\" SRC=\"%s\" HEIGHT=300 WIDTH=400>" \
                 + "</A></TD><TD><A HREF=\"%s\">" \
                 + "<IMG ID=\"%s\" SRC=\"%s\" HEIGHT=400 WIDTH=400>" \
                 + "</A></TD>\n") % \
                (dat['filename'],
                 dat['plotfilename'].split('.diagnostics/')[1], 
                 dat['plotfilename'].split('.diagnostics/')[1],
                 dat['plotfilename'].split('.diagnostics/')[1], 
                 catframe.split('.diagnostics/')[1], 
                 catframe.split('.diagnostics/')[1], 
                 catframe.split('.diagnostics/')[1])
        html += "<TD><TABLE BORDER=\"1\">\n<TR>\n"
        html += "<TH>Idx</TH><TH>Name</TH><TH>RA</TH><TH>Dec</TH>" \
                + "<TH>Catalog (mag)</TH>" \
                + "<TH>Instrumental (mag)</TH><TH>Calibrated (mag)</TH>" \
                + "<TH>Residual (mag</TH>\n</TR>\n"
        for i, idx in enumerate(dat['zp_usedstars']):
            name = dat['match'][0][2][idx]
            if isinstance(name, bytes):
                name = name.decode('utf8')
            html += ("<TR><TD>%d</TD><TD>%s</TD><TD>%12.8f</TD>" \
                     + "<TD>%12.8f</TD><TD>%.3f+-%.3f</TD>" \
                     + "<TD>%.3f+-%.3f</TD>" \
                     + "<TD>%.3f+-%.3f</TD><TD>%.3f</TD></TR>") % \
                (i+1, name,
                 dat['match'][0][3][idx],
                 dat['match'][0][4][idx], dat['match'][0][0][idx], 
                 dat['match'][0][1][idx],
                 dat['match'][1][0][idx], dat['match'][1][1][idx],
                 dat['zp']+dat['match'][1][0][idx], 
                 numpy.sqrt(dat['zp_sig']**2 + dat['match'][1][1][idx]**2),
                 (dat['zp']+dat['match'][1][0][idx])-dat['match'][0][0][idx])
        html += "</TABLE><P>derived zeropoint: %7.4f+-%6.4f mag\n" % \
                (dat['zp'], dat['zp_sig'])
        html += "</TR></TD></TR></TABLE>\n"

        ### create catalog frame
        fits_filename = dat['filename'][:dat['filename'].find('.ldac')] + \
                        '.fits'
        imgdat = fits.open(fits_filename, ignore_missing_end=True)[0].data
        resize_factor = min(1., 1000./numpy.max(imgdat.shape))
        # clip extreme values to prevent crash of imresize
        imgdat = numpy.clip(imgdat, numpy.percentile(imgdat, 1),
                            numpy.percentile(imgdat, 99))
        imgdat = imresize(imgdat, resize_factor, interp='nearest')
        header = fits.open(fits_filename, ignore_missing_end=True)[0].header

        norm = ImageNormalize(imgdat, interval=ZScaleInterval(),
                      stretch={'linear': LinearStretch(),
                               'log': LogStretch()}[imagestretch])
        
        # turn relevant header keys into floats
        # astropy.io.fits bug
        for key, val in list(header.items()):
            if 'CD1_' in key or 'CD2_' in key or \
               'CRVAL' in key or 'CRPIX' in key or \
               'EQUINOX' in key:
                header[key] = float(val)
                
        plt.figure(figsize=(5, 5))
        img = plt.imshow(imgdat, cmap='gray', norm=norm,
                         origin='lower')

        # remove axes
        plt.axis('off')
        img.axes.get_xaxis().set_visible(False)
        img.axes.get_yaxis().set_visible(False)

        # plot reference sources
        if len(dat['match'][0][3]) > 0 and len(dat['match'][0][4]) > 0:
            try:
                w = wcs.WCS(header)
                world_coo = [[dat['match'][0][3][idx],
                              dat['match'][0][4][idx]] \
                             for idx in dat['zp_usedstars']]
                img_coo = w.wcs_world2pix(world_coo, True )

                plt.scatter([c[0]*resize_factor for c in img_coo],
                            [c[1]*resize_factor for c in img_coo], 
                            s=10, marker='o', edgecolors='red', linewidth=0.1,
                            facecolor='none')
                for i in range(len(dat['zp_usedstars'])):
                    plt.annotate(str(i+1), xy=((img_coo[i][0]*resize_factor)+15,
                                               img_coo[i][1]*resize_factor), 
                                 color='red', horizontalalignment='left',
                                 verticalalignment='center')
            except astropy.wcs._wcs.InvalidTransformError:
                logging.error('could not plot reference sources due to '
                              'astropy.wcs._wcs.InvalidTransformError; '
                              'most likely unknown distortion parameters.')

                
        plt.savefig(catframe, format='png', bbox_inches='tight', 
                    pad_inches=0, dpi=200)
        plt.close()

    create_website(_pp_conf.cal_filename, content=html)

    ### update index.html 
    html  = "<H2>Photometric Calibration - Zeropoints</H2>\n"
    html += "match image data with %s (%s);\n" % \
            (data['ref_cat'].catalogname, data['ref_cat'].history)
    html += "see <A HREF=\"%s\">calibration</A> website for details\n" % \
            _pp_conf.cal_filename
    html += "<P><IMG SRC=\"%s\">\n" % ('.diagnostics/' + data['zpplot'])

    append_website(_pp_conf.index_filename, html,
                   replace_below=("<H2>Photometric Calibration "
                                  "- Zeropoints</H2>\n"))

    return None
Esempio n. 22
0
import os
from astropy.io import fits
from astropy import wcs
from astropy import units as u
import regions
import numpy as np
import pylab as pl

if not os.path.exists('W51e2w_ALMAB3_cutout.fits'):
    fh = fits.open(
        '/Users/adam/work/w51/alma/FITS/longbaseline/w51e2_sci.spw0_1_2_3_4_5_6_7_8_9_10_11_12_13_14_15_16_17_18_19.mfs.I.manual.image.tt0.pbcor.fits'
    )
    ww = wcs.WCS(fh[0].header).celestial
    pr0 = regions.read_ds9(
        '/Users/adam/work/w51/vla_q/regions/e2w_ellipse.reg')[0].to_pixel(ww)
    pr0.width *= 2.5
    pr0.height *= 2.5
    msk = pr0.to_mask()
    img_95ghz = msk.multiply(fh[0].data.squeeze())

    header = fh[0].header
    ww_cutout = ww[msk.bbox.slices]
    header.update(ww_cutout.to_header())
    fits.PrimaryHDU(data=img_95ghz,
                    header=header).writeto('W51e2w_ALMAB3_cutout.fits',
                                           overwrite=True)
else:
    img_95ghz = fits.getdata('W51e2w_ALMAB3_cutout.fits')

if not os.path.exists('W51e2w_VLA_Q_cutout.fits'):
    fh = fits.open(
def radprof2map(pos, energy, profile, nametag):
    # read energies and angular profiles
    energies = np.array(pd.read_hdf(energy))[:, 0]
    fluxes = np.array(pd.read_hdf(profile))
    angles = np.array(pd.read_hdf(profile).keys())

    # determine number of pixels to cover the entire profile
    # reduce resolution by factor of 2
    # + 1 makes the final map centered on the pulsar
    npix = len(angles) + 1

    # create wcs for output map
    # output binning will cover the whole model with npix pixels
    out_res = 2 * angles[-1] / (npix - 1)
    out_wcs = wcs.WCS(naxis=3)
    out_wcs.wcs.crpix = [(npix - 1.) / 2 + 1., (npix - 1) / 2 + 1., 1]
    out_wcs.wcs.cdelt = [-out_res, out_res, np.ediff1d(np.log10(energies))[0]]
    out_wcs.wcs.crval = [
        pos.ra.deg[0],
        pos.dec.deg[0],  # centered on pulsar
        np.log10(energies[0])
    ]
    out_wcs.wcs.ctype = ["RA---TAN", "DEC--TAN", "Log10(Energy/1 MeV)"]

    # create output map
    out_map = np.zeros([len(energies), npix, npix])
    # create pixel arrays
    ii = np.linspace(1, npix, npix)
    xv, yv = np.meshgrid(ii, ii)
    xx = xv.flatten()
    yy = yv.flatten()
    # fake array for energy
    zz = np.zeros(np.shape(xx))
    # world position array
    world = out_wcs.wcs_pix2world(np.array([xx, yy, zz]).T, 1)
    # sky coordinate array
    sc = SkyCoord(world[:, 0] * u.deg, world[:, 1] * u.deg, frame='icrs')
    # angular distance array in deg
    sep = sc.separation(pos).deg
    for k in range(len(energies)):
        # get flux interpolating over angular bins
        flux = np.interp(sep, angles, fluxes[k])
        # set flux to 0 beyond range covered by model
        flux[sep > angles[-1]] = 0.
        # reshape array and fill map
        out_map[k] = flux.reshape(npix, npix)

    # create the main hdu
    hdu = fits.PrimaryHDU(out_map)
    hdu.header = out_wcs.to_header()
    hdu.header.set('BUNIT',
                   'photon/cm2/s/MeV/sr',
                   'Photon flux',
                   after='CRVAL3')
    hdu.verify('fix')

    # create the energy table
    ecol = fits.Column(name='Energy', format='D', unit='MeV', array=energies)
    tbhdu = fits.BinTableHDU.from_columns([ecol], name='ENERGIES')
    tbhdu.verify('fix')

    # write file
    hdulist = fits.HDUList([hdu, tbhdu])
    hdulist.writeto(nametag + '_map.fits', overwrite=True)
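# Usage sketch (not from the original source): pos is accessed as pos.ra.deg[0],
# so it should be a length-1 (array) SkyCoord; the HDF5 file names and the
# pulsar position below are placeholders, not values from the original code.
pulsar_pos = SkyCoord([83.633] * u.deg, [22.014] * u.deg, frame='icrs')
radprof2map(pulsar_pos, 'model_energies.h5', 'model_profile.h5', 'my_pulsar')
# -> writes my_pulsar_map.fits with a Log10(Energy) third axis and an ENERGIES table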
Esempio n. 24
0
def get_cubeinfo(header, returnHeader=False, origin=1):
    '''
    Parse the RA, DEC (and velocity) information from a 2D (3D) header.

    - This function has been tested with GALFA-HI/EBHIS cubes and GALFA-HI 2D images.
      It has also been tested with LAB cubes/images that are in (glon, glat) coordinates.
    - The input header can be 2D (NAXIS=2: NAXIS1 is RA/glon, NAXIS2 is DEC/glat)
      or 3D (NAXIS=3: NAXIS1 is RA/glon, NAXIS2 is DEC/glat, NAXIS3 is velocity).
    - Return: for GALFA-HI or EBHIS,
          ra and dec in 2D with shape (dec.size, ra.size) or (NAXIS2, NAXIS1),
          and velocity in 1D; or (ra, dec, vlsr, header array) if returnHeader is True.
      For LAB,
          glon and glat in 2D with shape (glat.size, glon.size) or (NAXIS2, NAXIS1),
          and velocity in 1D; or (gl, gb, vlsr, header array) if returnHeader is True.
    - History: updated as of 2016.10.03. Yong Zheng @ Columbia Astro.
    '''

    #import sys
    #import astropy.wcs as wcs
    #import numpy as np


    hdrarrs = []
    if header['NAXIS'] == 2:
        hdr2d = header.copy()
        hdrarrs.append(hdr2d)
    elif header['NAXIS'] == 3:
        # create a 2D header (RA/DEC) to speed up the RA/DEC calculation using astropy.wcs
        hdr2d = header.copy()
        # we don't need the velocity (3) information in the header
        delkey = []
        for key in hdr2d.keys():
            if len(key) != 0 and key[-1] == '3': delkey.append(key)
        for i in delkey: del hdr2d[i]

        hdr2d['NAXIS'] = 2
        if 'WCSAXES' in hdr2d.keys(): hdr2d['WCSAXES']=2

        # create a 1D header (vel) to parse the velocity using astropy.wcs
        hdr1d = header.copy()
        # we don't need the RA/DEC keywords info in the header now.
        delkey = []
        for keya in hdr1d.keys():
            if len(keya) != 0 and keya[-1] in ['1', '2']: delkey.append(keya)
        for i in delkey: del hdr1d[i]
        delkey = []
        for keyb in hdr1d.keys():
            if len(keyb) != 0 and keyb[-1] == '3':
                hdr1d.append('%s1'%(keyb[:-1]))
                hdr1d['%s1'%(keyb[:-1])] = hdr1d[keyb]
                delkey.append(keyb)
        for i in delkey: del hdr1d[i]
        hdr1d['NAXIS'] = 1
        if 'WCSAXES' in hdr1d.keys(): hdr1d['WCSAXES']=1

        # save header arrays
        hdrarrs.append(hdr2d)
        hdrarrs.append(hdr1d)
    else:
        print("This code can only handle 2D or 3D data")
        sys.exit(1)

    return_arrays = []

    # calculate RA, DEC
    gwcsa = wcs.WCS(hdr2d)
    n1, n2 = hdr2d['NAXIS1'], hdr2d['NAXIS2']
    ax = np.reshape(np.mgrid[0:n1:1]+1, (1, n1))  # For FITS standard, origin = 1
    ay = np.reshape(np.mgrid[0:n2:1]+1, (n2, 1))  #   then for numpy standard, origin = 0
    coor1, coor2 = gwcsa.all_pix2world(ax, ay, origin) # coor1 = ra  or glon
    return_arrays.append(coor1) 		  # coor2 = dec or glat
    return_arrays.append(coor2)

    ## calculate VLSR
    if header['NAXIS'] == 3:
        gwcsb = wcs.WCS(hdr1d)
        n1 = hdr1d['NAXIS1']
        ax = np.mgrid[0:n1:1]+1
        # ax = np.linspace(0, n1, n1)  # nope, wrong
        vel = gwcsb.all_pix2world(ax, origin)[0]
        if 'CUNIT1' in hdr1d.keys():
            if hdr1d['CUNIT1'] in ['m/s', 'M/S', 'M/s', 'm/S']:
                vel = vel/1e3
        else: vel = vel/1e3  # default is usually in m/s
        return_arrays.append(vel)

    if returnHeader: return_arrays.append(hdrarrs)
    return return_arrays
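# Usage sketch (not from the original source): 'galfa_cube.fits' is a placeholder
# file name, and astropy.io.fits is assumed to be imported as fits, as elsewhere
# in these examples.
hdr = fits.getheader('galfa_cube.fits')
if hdr['NAXIS'] == 3:
    ra, dec, vlsr = get_cubeinfo(hdr)
else:
    ra, dec = get_cubeinfo(hdr)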
Esempio n. 25
0
imagein = 'mask_han1_mask_imfit_13co_pix_2_Tmb.fits'
hdulist = fits.open(imagein)
print(hdulist[0].data.shape)
#sys.exit()
data = hdulist[0].data[0, :, :, :]
datarms = 0.64  # K from data paper table 2
header = hdulist[0].header
crpix3 = header['CRPIX3']
cdelt3 = header['CDELT3']
crval3 = header['CRVAL3']
bmaj = 8.  # header['BMAJ']*3600. # in arcsec
bmin = 8.  # header['BMIN']*3600. # in arcsec
bpa = 0.  # header['BPA']
cellsize = 2.  # abs(header['CDELT1']*3600.) # in arcsec
n1, n2, n3 = data.shape
w = wcs.WCS(header)
hdulist.close()


def beampixel(
    bmaj,
    bmin,
    bpa,
    corecenter,
    cellsize,
    beamfraction=1.
):  # bmaj, bmin, cellsize in arcsec, corecenter = [pixelx, pixely], input bpa in degree
    pixellist = []
    rotation = float(bpa) / 180. * np.pi
    cosa = np.cos(rotation)
    sina = np.sin(rotation)
Esempio n. 26
0
def detect_with_sep(
    event,
    detect_thresh=2.,
    npixels=8,
    grow_seg=5,
    gauss_fwhm=2.,
    gsize=3,
    im_wcs=None,
):
    """ Run SExtractor on a FITS file contained in the Lambda event

    This function will generate a catalog and a PNG for the FITS file stored in
    the Lambda event. The catalog and PNG will be stored in the s3 output
    bucket specified by the Lambda event.

    Parameters
    ----------
    event : dict
        dict containing the data passed to the Lambda function
    detect_thresh: float
        detection threshold to use for sep.extract
    npixels: int
        minimum number of pixels comprising an object
    grow_seg: int
        (not used in this function)
    gauss_fwhm: float
        FWHM of the kernel to use for filtering prior to source finding
    gsize: float
        (not used in this function)

    im_wcs: astropy.wcs.WCS
        WCS object defining the coordinate system of the observation


    Returns
    -------

    """

    drz_file = event['fits_s3_key']
    drz_file_bucket = event['fits_s3_bucket']
    fname = drz_file.split('/')[-1]

    s3 = boto3.resource('s3')
    bkt = s3.Bucket(drz_file_bucket)
    bkt.download_file(drz_file,
                      f"/tmp/{fname}",
                      ExtraArgs={"RequestPayer": "requester"})

    im = fits.open(f"/tmp/{fname}")
    if im_wcs is None:
        im_wcs = wcs.WCS(im[1].header, relax=True)

    data = im[1].data.byteswap().newbyteorder()
    wht_data = im[2].data.byteswap().newbyteorder()
    data_mask = np.cast[data.dtype](data == 0)

    ## Get AB zeropoint
    try:
        photfnu = im[0].header['PHOTFNU']
    except KeyError as e:
        LOG.warning(e)
        ZP = None
    else:
        ZP = -2.5 * np.log10(photfnu) + 8.90

    try:
        photflam = im[0].header['PHOTFLAM']
    except KeyError as e:
        LOG.warning(e)
        ZP = None
    else:
        ZP = -2.5*np.log10(photflam) - 21.10 - \
             5*np.log10(im[0].header['PHOTPLAM']) + 18.6921

    if ZP is None:
        msg = ("Whoops! No zeropoint information found in primary header, "
               f"skipping file {fname}")
        LOG.warning(msg)
        return

    # Scale fluxes to micro-Jy
    uJy_to_dn = 1 / (3631 * 1e6 * 10**(-0.4 * ZP))

    # set up the error array
    err = 1 / np.sqrt(wht_data)
    err[~np.isfinite(err)] = 0
    mask = (err == 0)

    # get the background
    bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3)
    bkg_data = bkg.back()

    ratio = bkg.rms() / err
    err_scale = np.median(ratio[(~mask) & np.isfinite(ratio)])

    err *= err_scale

    # Generate a kernel to use for filtering
    gaussian_kernel = kernels.Gaussian2DKernel(
        x_stddev=gauss_fwhm / gaussian_sigma_to_fwhm,
        y_stddev=gauss_fwhm / gaussian_sigma_to_fwhm,
        x_size=7,
        y_size=7)
    # Normalize the kernel
    gaussian_kernel.normalize()

    # Package the inputs for sextractor
    inputs = {
        'err': err,
        'mask': mask,
        'filter_kernel': gaussian_kernel.array,
        'filter_type': 'conv',
        'minarea': npixels,
        'deblend_nthresh': 32,
        'deblend_cont': 0.005,
        'clean': True,
        'clean_param': 1,
        'segmentation_map': False
    }

    objects = sep.extract(data - bkg_data, detect_thresh, **inputs)

    catalog = Table(objects)

    # add things to catalog
    autoparams = [2.5, 3.5]
    catalog['number'] = np.arange(len(catalog), dtype=np.int32) + 1
    catalog['theta'] = np.clip(catalog['theta'], -np.pi / 2, np.pi / 2)

    # filter out any NaNs
    for c in ['a', 'b', 'x', 'y', 'theta']:
        catalog = catalog[np.isfinite(catalog[c])]

    catalog['ra'], catalog['dec'] = im_wcs.all_pix2world(
        catalog['x'], catalog['y'], 1)

    catalog['ra'].unit = u.deg
    catalog['dec'].unit = u.deg
    catalog['x_world'], catalog['y_world'] = catalog['ra'], catalog['dec']

    kronrad, krflag = sep.kron_radius(data - bkg_data, catalog['x'],
                                      catalog['y'], catalog['a'], catalog['b'],
                                      catalog['theta'], 6.0)

    kronrad *= autoparams[0]
    kronrad[~np.isfinite(kronrad)] = autoparams[1]
    kronrad = np.maximum(kronrad, autoparams[1])

    kron_out = sep.sum_ellipse(data - bkg_data,
                               catalog['x'],
                               catalog['y'],
                               catalog['a'],
                               catalog['b'],
                               catalog['theta'],
                               kronrad,
                               subpix=5,
                               err=err)

    kron_flux, kron_fluxerr, kron_flag = kron_out
    kron_flux_flag = kron_flag

    catalog['mag_auto_raw'] = ZP - 2.5 * np.log10(kron_flux)
    catalog['magerr_auto_raw'] = 2.5 / np.log(10) * kron_fluxerr / kron_flux

    catalog['mag_auto'] = catalog['mag_auto_raw'] * 1.
    catalog['magerr_auto'] = catalog['magerr_auto_raw'] * 1.

    catalog['kron_radius'] = kronrad * u.pixel
    catalog['kron_flag'] = krflag
    catalog['kron_flux_flag'] = kron_flux_flag

    # Make a plot
    im_data = im[1].data
    im_shape = im_data.shape
    im_data[np.isnan(im_data)] = 0.0

    # Trim the top and bottom 1 percent of pixel values
    top = np.percentile(im_data, 99)
    im_data[im_data > top] = top
    bottom = np.percentile(im_data, 1)
    im_data[im_data < bottom] = bottom

    # Scale the data.
    im_data = im_data - im_data.min()
    im_data = (im_data / im_data.max()) * 255.
    im_data = np.uint8(im_data)

    f, (ax) = plt.subplots(1, 1, sharex=True)
    f.set_figheight(12)
    f.set_figwidth(12)
    ax.imshow(im_data, cmap="Greys", clim=(0, 255), origin='lower')
    ax.plot(catalog['x'],
            catalog['y'],
            'o',
            markeredgewidth=1,
            markeredgecolor='red',
            markerfacecolor='None')
    ax.set_xlim([-0.05 * im_shape[1], 1.05 * im_shape[1]])
    ax.set_ylim([-0.05 * im_shape[0], 1.05 * im_shape[0]])

    basename = fname.split('_')[0]
    f.savefig(f"/tmp/{basename}.png")

    # Write the catalog to local disk
    catalog.write(f"/tmp/{basename}.catalog.fits", format='fits')

    # Write out to S3
    s3 = boto3.resource('s3')
    s3.meta.client.upload_file(f"/tmp/{basename}.catalog.fits",
                               event['s3_output_bucket'],
                               f"{basename}/{basename}.catalog.fits")
    s3.meta.client.upload_file(f"/tmp/{basename}.png",
                               event['s3_output_bucket'],
                               f"{basename}/{basename}.png")
def fourier_combine_cubes(
    cube1,
    cube2,
    highresextnum=0,
    highresscalefactor=1.0,
    lowresscalefactor=1.0,
    lowresfwhm=1 * u.arcmin,
    return_regridded_cube2=False,
    return_hdu=False,
):
    """
    Fourier combine two data cubes

    Parameters
    ----------
    cube1 : SpectralCube or str
        The high-resolution cube, or a FITS filename to read it from
    cube2 : SpectralCube or str
        The low-resolution (single-dish) cube, or a FITS filename to read it from
    highresextnum : int
        The extension number to use from the high-res FITS file
    highresscalefactor : float
    lowresscalefactor : float
        A factor to multiply the high- or low-resolution data by to match the
        low- or high-resolution data
    lowresfwhm : `astropy.units.Quantity`
        The full-width-half-max of the single-dish (low-resolution) beam;
        or the scale at which you want to try to match the low/high resolution
        data
    return_hdu : bool
        Return an HDU instead of just an array; the HDU carries the combined
        (real) data and the WCS header of the high-resolution cube.
    return_regridded_cube2 : bool
        Return the 2nd cube regridded into the pixel space of the first?
    """
    if isinstance(cube1, str):
        cube1 = SpectralCube.read(cube1)
    if isinstance(cube2, str):
        cube2 = SpectralCube.read(cube2)
    #cube1 = spectral_cube.io.fits.load_fits_cube(highresfitsfile,
    #                                             hdu=highresextnum)
    im1 = cube1._data  # want the raw data for this
    hd1 = cube1.header
    assert hd1['NAXIS'] == im1.ndim == 3
    w1 = cube1.wcs
    pixscale = np.abs(w1.wcs.get_cdelt()[0])  # REPLACE EVENTUALLY...

    cube2 = cube2.to(cube1.unit)

    assert cube1.unit == cube2.unit, 'Cubes must have same or equivalent unit'
    assert cube1.unit.is_equivalent(u.Jy / u.beam) or cube1.unit.is_equivalent(
        u.K), "Cubes must have brightness units."

    #f2 = regrid_fits_cube(lowresfitsfile, hd1)
    f2 = regrid_cube_hdu(cube2.hdu, hd1)
    w2 = wcs.WCS(f2.header)

    nax1, nax2, nax3 = (hd1['NAXIS1'], hd1['NAXIS2'], hd1['NAXIS3'])

    dcube1 = im1 * highresscalefactor
    dcube2 = f2.data * lowresscalefactor
    outcube = np.empty_like(dcube1)

    xgrid, ygrid = (np.indices([nax2, nax1]) -
                    np.array([(nax2 - 1.) / 2,
                              (nax1 - 1.) / 2.])[:, None, None])
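    # sqrt(8 ln 2) ~ 2.355 is the conversion factor between Gaussian FWHM and sigma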
    fwhm = np.sqrt(8 * np.log(2))
    # sigma in pixels
    sigma = ((lowresfwhm / fwhm / (pixscale * u.deg)).decompose().value)
    #sigma_fftspace = (1/(4*np.pi**2*sigma**2))**0.5
    sigma_fftspace = (2 * np.pi * sigma)**-1
    log.debug('sigma = {0}, sigma_fftspace={1}'.format(sigma, sigma_fftspace))

    kernel = np.fft.fftshift(np.exp(-(xgrid**2 + ygrid**2) / (2 * sigma**2)))
    # convert the kernel, which is just a gaussian in image space,
    # to its corresponding kernel in fourier space
    kfft = np.abs(np.fft.fft2(kernel))  # should be mostly real
    # normalize the kernel
    kfft /= kfft.max()
    ikfft = 1 - kfft

    pb = ProgressBar(dcube1.shape[0])

    for ii, (im1, im2) in enumerate(zip(dcube1, dcube2)):

        fft1 = np.fft.fft2(np.nan_to_num(im1))
        fft2 = np.fft.fft2(np.nan_to_num(im2))

        fftsum = kfft * fft2 + ikfft * fft1

        combo = np.fft.ifft2(fftsum)
        outcube[ii, :, :] = combo.real

        pb.update(ii + 1)

    if return_regridded_cube2:
        return outcube, f2
    elif return_hdu:
        return fits.PrimaryHDU(data=outcube, header=w1.to_header())
    else:
        return outcube
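# Usage sketch (not from the original source): the file names are placeholders.
# Strings are accepted because the function calls SpectralCube.read on them.
combined = fourier_combine_cubes('interferometer_cube.fits',
                                 'singledish_cube.fits',
                                 lowresfwhm=30 * u.arcsec,
                                 return_hdu=True)
combined.writeto('feathered_cube.fits', overwrite=True)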
Esempio n. 28
0
def buildmasks(filename, nChan=2000, width=2e9, outdir=None):
    """Builds masks for use in DEGAS imaging pipeline. 

    Parameters
    ----------

    filename : str
        FITS filename of spectral cube mask. The file should be a
        binary mask with True / 1 indicating emission and False / 0
        otherwise.  This assumes the cube has a spectral axis in
        velocity and that the cube or has the metadata required to
        convert to velocity.  Note there is no checking of the
        spectral frame (LSRK, LSRD, BARY) and the conversion assumes
        radio Doppler convention.
  
    nChan : int
        Number of channels in output mask.  This should be larger than
        the number of channels in the DEGAS bandpass (1024)

    width : float
        Spectral width in Hz of the resulting mask.  This should be
        larger than the GBT bandwidth used (usually 1.5 GHz for DEGAS)

    outdir : str
        Directory for output masks to be stored in
    """


    if outdir is None:
       outdir = os.environ['DEGASDIR'] + 'masks/'
       
    if not os.access(outdir,os.W_OK):
        try:
            os.mkdir(outdir)
            print('Made directory {0}'.format(outdir))
        except OSError:
            try:
                os.mkdir('/'.join((outdir.split('/'))[0:-1])) # there may be a safer way to do this with os.path.split
                os.mkdir(outdir)
                print('Made directory {0}'.format(outdir))
            except:
                warnings.warn('Unable to make output directory '+outdir)
                raise
        except:
            warnings.warn('Unable to make output directory '+outdir)
            raise

    # Read in original cube, ensure in velocity space
    s = SpectralCube.read(filename)
    s = s.with_spectral_unit(u.km / u.s, velocity_convention='radio')
    vmid = s.spectral_axis[len(s.spectral_axis)//2].value
    c = 299792.458  # speed of light in km/s
    # HCN_HCO+
    # Build a mask with a spectral width of 2 GHz and the same spatial 
    # dimensions as the original mask
  
    s_hcn = s.with_spectral_unit(u.Hz, rest_value=88.631847 * u.GHz)
    s_hcop = s.with_spectral_unit(u.Hz, rest_value=89.188518 * u.GHz)

    mask = np.zeros((nChan, s.shape[1], s.shape[2]), dtype=np.byte)
    hdr = s_hcn.wcs.to_header()
    hdr['CRPIX3'] = 1000
    hdr['CDELT3'] = width / nChan
    hdr['CRVAL3'] = (89.188518 + 88.631847) / 2 * 1e9 * (1 - vmid / c)
    hdr['NAXIS'] = 3
    hdr['NAXIS1'] = mask.shape[2]
    hdr['NAXIS2'] = mask.shape[1]
    hdr['NAXIS3'] = mask.shape[0]
    hdr['SIMPLE'] = 'T'
    hdr['BITPIX'] = 8
    hdr['EXTEND'] = 'T'

    hdr = deduplicate_keywords(hdr)
    w = wcs.WCS(hdr)
    maskcube = SpectralCube(mask, w, header=hdr)
    for zz in range(nChan):
        nu = maskcube.spectral_axis[zz]
        _, _, zz_hcn = s_hcn.wcs.wcs_world2pix(hdr['CRVAL1'],
                                               hdr['CRVAL2'],
                                               nu, 0)
        zz_hcn = int(zz_hcn)
        _, _, zz_hcop = s_hcop.wcs.wcs_world2pix(hdr['CRVAL1'],
                                                 hdr['CRVAL2'],
                                                 nu, 0)
        zz_hcop = int(zz_hcop)
        if 0 <= zz_hcn < s_hcn.shape[0]:
            mask[zz, :, :] = np.array(s_hcn.filled_data[zz_hcn, :, :],
                                      dtype=np.bool)
        if 0 <= zz_hcop < s_hcop.shape[0]:
            mask[zz, :, :] = np.array(s_hcop.filled_data[zz_hcop, :, :],
                                      dtype=np.bool)
    maskcube = SpectralCube(mask, w, header=hdr)
    galname = os.path.split(filename)[1].split('_')[0]
    
    maskcube.write(outdir + galname+'.hcn_hcop.mask.fits',
                   overwrite=True)

    # C18O/13CO
    # Build a mask with a spectral width of 2 GHz and the same spatial
    # dimensions as the original mask

    s_13co = s.with_spectral_unit(u.Hz, rest_value=110.20135 * u.GHz)
    s_c18o = s.with_spectral_unit(u.Hz, rest_value=109.78217 * u.GHz)

    mask = np.zeros((nChan, s.shape[1], s.shape[2]), dtype=np.byte)
    hdr = s_13co.wcs.to_header()
    hdr['CRPIX3'] = 1000
    hdr['CDELT3'] = width / nChan
    hdr['CRVAL3'] = (110.20135 + 109.78217) / 2 * 1e9 * (1 - vmid / c)
    hdr['NAXIS'] = 3
    hdr['NAXIS1'] = mask.shape[2]
    hdr['NAXIS2'] = mask.shape[1]
    hdr['NAXIS3'] = mask.shape[0]
    hdr['SIMPLE'] = 'T'
    hdr['BITPIX'] = 8
    hdr['EXTEND'] = 'T'
    w = wcs.WCS(hdr)
    hdr = deduplicate_keywords(hdr)
    
    maskcube = SpectralCube(mask, w, header=hdr)
    for zz in range(nChan):
        nu = maskcube.spectral_axis[zz]
        _, _, zz_13co = s_13co.wcs.wcs_world2pix(hdr['CRVAL1'],
                                               hdr['CRVAL2'],
                                               nu, 0)
        zz_13co = int(zz_13co)
        _, _, zz_c18o = s_c18o.wcs.wcs_world2pix(hdr['CRVAL1'],
                                                 hdr['CRVAL2'],
                                                 nu, 0)
        zz_c18o = int(zz_c18o)
        if 0 <= zz_13co < s_13co.shape[0]:
            mask[zz, :, :] = np.array(s_13co.filled_data[zz_13co, :, :],
                                      dtype=np.bool)
        if 0 <= zz_c18o < s_c18o.shape[0]:
            mask[zz, :, :] = np.array(s_c18o.filled_data[zz_c18o, :, :],
                                      dtype=np.bool)
    maskcube = SpectralCube(mask, w, header=hdr)
    maskcube.write(outdir + galname + '.13co_c18o.mask.fits',
                   overwrite=True)
    
    # 12CO
    # Build a mask with a spectral width of 2 GHz and the same spatial
    # dimensions as the original mask

    s_12co = s.with_spectral_unit(u.Hz, rest_value=115.271204 * u.GHz)


    mask = np.zeros((nChan, s.shape[1], s.shape[2]), dtype=np.byte)
    hdr = s_12co.wcs.to_header()
    hdr['CRPIX3'] = 1000
    hdr['CDELT3'] = width / nChan
    hdr['CRVAL3'] = (115.271204) * 1e9
    hdr['NAXIS'] = 3
    hdr['NAXIS1'] = mask.shape[2]
    hdr['NAXIS2'] = mask.shape[1]
    hdr['NAXIS3'] = mask.shape[0]
    hdr['SIMPLE'] = 'T'
    hdr['BITPIX'] = 8
    hdr['EXTEND'] = 'T'
    w = wcs.WCS(hdr)
    hdr = deduplicate_keywords(hdr)
    
    maskcube = SpectralCube(mask, w, header=hdr)
    for zz in range(nChan):
        nu = maskcube.spectral_axis[zz]
        _, _, zz_12co = s_12co.wcs.wcs_world2pix(hdr['CRVAL1'],
                                               hdr['CRVAL2'],
                                               nu, 0)
        zz_12co = int(zz_12co)
        if 0 <= zz_12co < s_12co.shape[0]:
            mask[zz, :, :] = np.array(s_12co.filled_data[zz_12co, :, :],
                                      dtype=np.bool)
    maskcube = SpectralCube(mask, w, header=hdr)
    maskcube.write(outdir + galname+'.12co.mask.fits', overwrite=True)
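# Usage sketch (not from the original source): the mask file name is a placeholder.
# Output masks land in outdir (or $DEGASDIR/masks/ when outdir is None).
buildmasks('NGC4321_mask.fits', nChan=2000, width=2e9, outdir='./masks/')
# -> ./masks/NGC4321.hcn_hcop.mask.fits, NGC4321.13co_c18o.mask.fits,
#    NGC4321.12co.mask.fits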
Esempio n. 29
0
def main1():

    m31 = fits.open('fitsfiles/m31cm6i_full_3min_large.fits')
    hdr = m31[0].header
    hdr['NAXIS'] = 2
    m31_w = wcs.WCS(naxis=2)  #[0,:,:]#.slice((0,slice(0,None),slice(0,None)))
    m31_w.wcs.crpix = [hdr['CRPIX1'], hdr['CRPIX2']]
    m31_w.wcs.cdelt = [hdr['CDELT1'], hdr['CDELT2']]
    m31_w.wcs.crval = [hdr['CRVAL1'], hdr['CRVAL2']]
    m31_w.wcs.ctype = [hdr['CTYPE1'], hdr['CTYPE2']]
    m31_w.wcs.crota = [hdr['CROTA1'], hdr['CROTA2']]
    m31_w.wcs.equinox = hdr['EPOCH']

    for k, v in hdr.items():
        print(k, v)
    #stop

    m31_img = m31[0].data[0, :, :]

    #img_flat[select] = 1000
    #img = np.reshape(img_flat, img.shape)
    #img1, w = read_image('fitsfiles/fg4_feeds15.0-17.0-18.0_offset50.0_band1.0_freq0.0.fits')#sys.argv[1])
    #img2, w = read_image('fitsfiles/fg4_feeds15.0-17.0-18.0_offset50.0_band0.0_freq0.0.fits')#sys.argv[1])
    #img = img1-img2
    img, w = read_image(sys.argv[1])

    cmap = pyplot.get_cmap('RdBu_r')
    pyplot.figure(figsize=(12, 8))
    ax = pyplot.subplot(projection=w)

    sources = [SkyCoord('00h38m24.84s', '+41d37m06.00s', frame='icrs')]
    #SkyCoord('00h46m48.1s' ,'+41d41m07.00s',frame='icrs'),
    #SkyCoord('00h42m44.33s','+41d16m07.50s',frame='icrs')]

    mimg = img * 1
    mimg[np.isnan(img)] = 0
    mimg = gaussian_filter(mimg, sigma=2)
    img[img == 0] = np.nan
    mimg[mimg == 0] = np.nan
    zimg = ax.imshow(img, cmap=cmap, origin='lower', aspect='auto')
    cbar = pyplot.colorbar(zimg)
    cbar.set_label('K', size=20)

    for source in sources:
        ax.scatter(source.ra,
                   source.dec,
                   transform=ax.get_transform('icrs'),
                   s=300,
                   edgecolor='k',
                   facecolor='none')

    #print(m31_w)
    #print(w)
    # ax.contour(m31_img, transform=ax.get_transform(m31_w),
    #           origin='lower',
    #           cmap=pyplot.get_cmap('Greys'),
    #           linewidths=3,
    #           alpha=0.85,
    #           levels=[-0.005,0.005,0.010,0.015])
    ##pyplot.contour(mimg, cmap = pyplot.get_cmap('Greys_r'),
    #               levels=[-0.02,-0.015,-0.01,-0.005,0,0.01,0.02,0.045,0.07,0.08,0.09,0.135,0.3,0.4])

    fname = sys.argv[1].split('/')[-1].split('.fit')[0]
    pyplot.gca().invert_xaxis()
    pyplot.grid()
    pyplot.xlabel(r'$\alpha$', size=20)
    pyplot.ylabel(r'$\delta$', size=20)
    pyplot.gca().set_xlim(0.9 * img.shape[1], 0.1 * img.shape[1])
    pyplot.gca().set_ylim(0.1 * img.shape[0], 0.9 * img.shape[0])
    #xpyplot.gca().add_patch(circle)
    pyplot.title(sys.argv[1], size=5)
    #pyplot.savefig('jackknife.png')#.format(fname))
    pyplot.savefig('nooverlay_{}.png'.format(fname))
    pyplot.show()
def create_image_from_visibility(vis, **kwargs) -> Image:
    """Make an empty image from params and Visibility
    
    This makes an empty, template image consistent with the visibility, allowing optional overriding of select
    parameters. This is a convenience function and does not transform the visibilities.

    :param vis:
    :param phasecentre: Phasecentre (Skycoord)
    :param channel_bandwidth: Channel width (Hz)
    :param cellsize: Cellsize (radians)
    :param npixel: Number of pixels on each axis (512)
    :param frame: Coordinate frame for WCS (ICRS)
    :param equinox: Equinox for WCS (2000.0)
    :param nchan: Number of image channels (Default is 1 -> MFS)
    :return: image
    """
    assert isinstance(vis, Visibility) or isinstance(vis, BlockVisibility), \
        "vis is not a Visibility or a BlockVisibility: %r" % (vis)
    
    log.info("create_image_from_visibility: Parsing parameters to get definition of WCS")
    
    imagecentre = get_parameter(kwargs, "imagecentre", vis.phasecentre)
    phasecentre = get_parameter(kwargs, "phasecentre", vis.phasecentre)
    
    # Spectral processing options
    ufrequency = numpy.unique(vis.frequency)
    vnchan = len(ufrequency)
    
    frequency = get_parameter(kwargs, "frequency", vis.frequency)
    inchan = get_parameter(kwargs, "nchan", vnchan)
    reffrequency = frequency[0] * units.Hz
    channel_bandwidth = get_parameter(kwargs, "channel_bandwidth", 0.99999999999 * vis.channel_bandwidth[0]) * units.Hz
    
    if (inchan == vnchan) and vnchan > 1:
        log.info(
            "create_image_from_visibility: Defining %d channel Image at %s, starting frequency %s, and bandwidth %s"
            % (inchan, imagecentre, reffrequency, channel_bandwidth))
    elif (inchan == 1) and vnchan > 1:
        assert numpy.abs(channel_bandwidth.value) > 0.0, "Channel width must be non-zero for mfs mode"
        log.info("create_image_from_visibility: Defining single channel MFS Image at %s, starting frequency %s, "
                 "and bandwidth %s"
                 % (imagecentre, reffrequency, channel_bandwidth))
    elif inchan > 1 and vnchan > 1:
        assert numpy.abs(channel_bandwidth.value) > 0.0, "Channel width must be non-zero for mfs mode"
        log.info("create_image_from_visibility: Defining multi-channel MFS Image at %s, starting frequency %s, "
                 "and bandwidth %s"
                 % (imagecentre, reffrequency, channel_bandwidth))
    elif (inchan == 1) and (vnchan == 1):
        assert numpy.abs(channel_bandwidth.value) > 0.0, "Channel width must be non-zero for mfs mode"
        log.info("create_image_from_visibility: Defining single channel Image at %s, starting frequency %s, "
                 "and bandwidth %s"
                 % (imagecentre, reffrequency, channel_bandwidth))
    else:
        raise ValueError("create_image_from_visibility: unknown spectral mode ")
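    # Summary of the spectral modes handled above:
    #   inchan == vnchan  > 1  -> image cube with one channel per unique visibility frequency
    #   inchan == 1, vnchan > 1 -> single-channel MFS image spanning all frequencies
    #   inchan  > 1, vnchan > 1 -> multi-channel MFS image
    #   inchan == 1, vnchan == 1 -> single-channel image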
    
    # Image sampling options
    npixel = get_parameter(kwargs, "npixel", 512)
    uvmax = numpy.max((numpy.abs(vis.data['uvw'][:, 0:1])))
    if isinstance(vis, BlockVisibility):
        uvmax *= numpy.max(frequency) / constants.c.to('m s^-1').value
    log.info("create_image_from_visibility: uvmax = %f wavelengths" % uvmax)
    criticalcellsize = 1.0 / (uvmax * 2.0)
    log.info("create_image_from_visibility: Critical cellsize = %f radians, %f degrees" % (
        criticalcellsize, criticalcellsize * 180.0 / numpy.pi))
    cellsize = get_parameter(kwargs, "cellsize", 0.5 * criticalcellsize)
    log.info("create_image_from_visibility: Cellsize          = %f radians, %f degrees" % (cellsize,
                                                                                           cellsize * 180.0 / numpy.pi))
    override_cellsize = get_parameter(kwargs, "override_cellsize", True)
    if override_cellsize and cellsize > criticalcellsize:
        log.info("create_image_from_visibility: Resetting cellsize %f radians to criticalcellsize %f radians" % (
            cellsize, criticalcellsize))
        cellsize = criticalcellsize
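    # Worked example with illustrative numbers (not from the data): uvmax = 1000
    # wavelengths gives criticalcellsize = 1 / (2 * 1000) = 5e-4 rad ~ 0.029 deg
    # ~ 103 arcsec, and the default cellsize is half of that, 2.5e-4 rad.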
    pol_frame = get_parameter(kwargs, "polarisation_frame", PolarisationFrame("stokesI"))
    inpol = pol_frame.npol
    
    # Now we can define the WCS, which is a convenient place to hold the info above
    # Beware of python indexing order! wcs and the array have opposite ordering
    shape = [inchan, inpol, npixel, npixel]
    w = wcs.WCS(naxis=4)
    # The negation in the longitude is needed by definition of RA, DEC
    w.wcs.cdelt = [-cellsize * 180.0 / numpy.pi, cellsize * 180.0 / numpy.pi, 1.0, channel_bandwidth.to(units.Hz).value]
    # The numpy definition of the phase centre of an FFT is n // 2 (0 - rel) so that's what we use for
    # the reference pixel. We have to use 0 rel everywhere.
    w.wcs.crpix = [npixel // 2 + 1, npixel // 2 + 1, 1.0, 1.0]
    w.wcs.ctype = ["RA---SIN", "DEC--SIN", 'STOKES', 'FREQ']
    w.wcs.crval = [phasecentre.ra.deg, phasecentre.dec.deg, 1.0, reffrequency.to(units.Hz).value]
    w.naxis = 4
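    # Note the reversed ordering: the image array shape is [nchan, npol, ny, nx]
    # while the WCS axes run [RA, DEC, STOKES, FREQ], so the first WCS axis maps
    # to the last array axis and vice versa.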
    
    direction_centre = pixel_to_skycoord(npixel // 2 + 1, npixel // 2 + 1, wcs=w, origin=1)
    assert direction_centre.separation(imagecentre).value < 1e-7, \
        "Image phase centre [npixel//2, npixel//2] should be %s, actually is %s" % \
        (str(imagecentre), str(direction_centre))
    
    w.wcs.radesys = get_parameter(kwargs, 'frame', 'ICRS')
    w.wcs.equinox = get_parameter(kwargs, 'equinox', 2000.0)
    
    return create_image_from_array(numpy.zeros(shape), wcs=w, polarisation_frame=pol_frame)
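
A minimal usage sketch for the function above. It assumes a Visibility (or BlockVisibility) named vis has already been created elsewhere; the keyword values are illustrative rather than taken from the original document.

model = create_image_from_visibility(
    vis,
    npixel=1024,                                      # image is npixel x npixel
    cellsize=0.0005,                                  # radians; reset to the critical cellsize if coarser
    nchan=1,                                          # 1 -> single-channel MFS image
    polarisation_frame=PolarisationFrame("stokesI"),  # Stokes I only
)
# The result is an all-zero template image with a 4-axis WCS (RA---SIN, DEC--SIN,
# STOKES, FREQ) centred on vis.phasecentre, typically used as the model/template
# for later imaging steps.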