Example #1
def LCO_PSF_PHOT(hdu,init_guesses):
	# im ~ np array of image data, [x0,y0] ~ float pixel position, sigma_psf ~ LCO PSF sigma
	x0,y0=init_guesses
	im = hdu.data
	hdr = hdu.header
	
	fwhm = hdr['L1FWHM']/hdr['PIXSCALE'] # PSF FWHM in pixels, roughly ~ 5 pixels, ~ 2 arcsec 
	sigma_psf = fwhm*gaussian_fwhm_to_sigma # PSF sigma in pixels
	
	psf_model = IntegratedGaussianPRF(sigma=sigma_psf)
	daogroup = DAOGroup(2.0*sigma_psf*gaussian_sigma_to_fwhm)
	mmm_bkg = MMMBackground()
	fitter = LevMarLSQFitter()

	psf_model.x_0.fixed = True
	psf_model.y_0.fixed = True
	pos = Table(names=['x_0', 'y_0'], data=[[x0], [y0]])  # flux_0 could also be given, but the aperture-based initial guess works well

	photometry = BasicPSFPhotometry(group_maker=daogroup,
									 bkg_estimator=mmm_bkg,
									 psf_model=psf_model,
									 fitter=LevMarLSQFitter(),
									 fitshape=(11,11))
	result_tab = photometry(image=im, init_guesses=pos)
	residual_image = photometry.get_residual_image()
	
	return result_tab
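A minimal call sketch for the function above, assuming it is in scope along with the photutils/astropy imports it relies on; the file name and pixel guess are hypothetical:

from astropy.io import fits

# hypothetical LCO frame; the header must provide L1FWHM and PIXSCALE
# (use the extension that actually holds the science image)
with fits.open('lco_frame.fits') as hdul:
    result = LCO_PSF_PHOT(hdul[0], init_guesses=(512.3, 498.7))

print(result['x_fit', 'y_fit', 'flux_fit'])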
Example #2
def photomyPSF(imgdata, position, sigma):
    PSFdata = np.copy(imgdata)
    sigma_psf = sigma
    daogroup = DAOGroup(2.0 * sigma_psf * gaussian_sigma_to_fwhm)
    mmm_bkg = MMMBackground()
    #fitter = LevMarLSQFitter()
    psf_model = IntegratedGaussianPRF(sigma=sigma_psf)

    sources = Table()

    sources['x_mean'] = position[:, 0].T
    sources['y_mean'] = position[:, 1].T

    psf_model.x_0.fixed = True
    psf_model.y_0.fixed = True
    pos = Table(names=['x_0', 'y_0'],
                data=[sources['x_mean'], sources['y_mean']])
    photometry = BasicPSFPhotometry(group_maker=daogroup,
                                    bkg_estimator=mmm_bkg,
                                    psf_model=psf_model,
                                    fitter=LevMarLSQFitter(),
                                    fitshape=(11, 11))

    result_tab = photometry(image=PSFdata, init_guesses=pos)
    positionflux = np.transpose(
        (result_tab['x_fit'], result_tab['y_fit'], result_tab['flux_fit']))

    # instrumental magnitude with an arbitrary zero point of 25
    magstar = 25 - 2.5 * np.log10(np.abs(result_tab['flux_fit']))
    return positionflux, magstar
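photomyPSF above slices position[:, 0] and position[:, 1], so it expects an (N, 2) array of x/y pixel coordinates. A hedged call sketch, with made-up positions and an assumed PSF sigma:

import numpy as np

# imgdata: 2D image array loaded elsewhere; the coordinates below are hypothetical
positions = np.array([[125.4,  88.2],
                      [310.0, 275.6]])
positionflux, magstar = photomyPSF(imgdata, positions, sigma=2.0)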
Example #3
def PSF_photometry(data, coord_table, sigma_psf=10, scale=0.67, step=0.5):
    FLUX = []
    bkgrms = MADStdBackgroundRMS()
    std = bkgrms(data)
    iraffind = IRAFStarFinder(threshold=3.5 * std,
                              fwhm=sigma_psf * gaussian_sigma_to_fwhm,
                              minsep_fwhm=0.01,
                              roundhi=5.0,
                              roundlo=-5.0,
                              sharplo=0.0,
                              sharphi=2.0)
    daogroup = DAOGroup(2.0 * sigma_psf * gaussian_sigma_to_fwhm)
    mmm_bkg = MMMBackground()
    fitter = LevMarLSQFitter()
    psf_model = IntegratedGaussianPRF(sigma=sigma_psf)
    # psf_model.x_0.fixed = True
    # psf_model.y_0.fixed = True

    pos = Table(names=['x_0', 'y_0'],
                data=[coord_table['X'],
                      coord_table['Y']])[coord_table['good_star']]

    photometry = IterativelySubtractedPSFPhotometry(finder=iraffind,
                                                    group_maker=daogroup,
                                                    bkg_estimator=mmm_bkg,
                                                    psf_model=psf_model,
                                                    fitter=fitter,
                                                    niters=1,
                                                    fitshape=(41, 41))
    result_tab = photometry(image=data, init_guesses=pos)
    return result_tab['flux_fit']
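A sketch of the coord_table that PSF_photometry above expects: columns 'X', 'Y' and a boolean 'good_star' mask. The values are made up and data is assumed to be the 2D image array:

from astropy.table import Table

# data: 2D image array loaded elsewhere
coord_table = Table({'X': [105.2, 220.7, 340.1],
                     'Y': [88.9, 151.3, 402.6],
                     'good_star': [True, True, False]})
fluxes = PSF_photometry(data, coord_table, sigma_psf=10)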
Example #4
def psf_photometry(filepath, filename, show, sn_ra, sn_dec):

    start = time.time()

    warnings.simplefilter('ignore', category=FITSFixedWarning)
    warnings.simplefilter('ignore', category=AstropyUserWarning)

    print(f'Working on {filename}')

    full_filepath = filepath + filename
    image_data = getdata(full_filepath)
    hdr = getheader(full_filepath)
    fwhm = hdr['L1FWHM'] / hdr['PIXSCALE']
    exptime = hdr['EXPTIME']

    metadata = load_pickle(filename)
    epsf_data = np.array(metadata['epsf'])
    epsf = EPSFModel(epsf_data, fwhm=fwhm, oversampling=2)
    # epsf.x_0.fixed = True
    # epsf.y_0.fixed = True
    daogroup = DAOGroup(2.0 * fwhm)
    bkg = MMMBackground()
    fitter = LevMarLSQFitter()
    fitshape = 25
    photometry = BasicPSFPhotometry(group_maker=daogroup,
                                    bkg_estimator=bkg,
                                    psf_model=epsf,
                                    fitter=fitter,
                                    fitshape=fitshape,
                                    aperture_radius=fitshape)

    psfmags = []

    print('\tExtracting other stars . . .')
    counter = 0
    for star in metadata['psf_fitted_stars']:
        counter += 1
        (x, y) = star
        psfmags = _do_phot(x, y, image_data, exptime, fitshape, photometry,
                           psfmags)
        print(
            f'\t\tStars extracted: {counter}/{len(metadata["psf_fitted_stars"])}',
            end='\r')
    print()

    print('\tExtracting supernova . . .')
    x, y = _get_sn_xy(filepath, filename, sn_ra, sn_dec)
    psfmags = _do_phot(x, y, image_data, exptime, fitshape, photometry,
                       psfmags)

    create_or_update_pickle(filename, key='psfmags', val=psfmags)

    end = time.time()
    print(f'Time to perform photometry (s): {end-start:.3f}')

    if show:
        checkmag(image_data, photometry.get_residual_image(), x, y, fitshape)
    print()
Example #5
def findFitVertical(img, x, trailSize):
    parDist, perpDist = GT.getDistancesVertical(x, img.shape[1], img.shape[0])

    median = np.median(img)

    bkg = MMMBackground()
    back = bkg(img)  # MMM sky estimate (computed but unused; the median is what gets subtracted below)

    return fit(img - median, perpDist, parDist, trailSize)
Example #6
def astropy_psf_photometry(img,
                           sigma_psf,
                           aperture=3,
                           x0=None,
                           y0=None,
                           filter=True,
                           sigma_filter=1):
    """
    performs PSF photometry on an image. If x0 and y0 are None, the function attempts to locate the target by
    searching for the brightest PSF in the field

    :param img: 2D array, image on which to perform PSF photometry
    :param sigma_psf: float, standard deviation of the PSF
    :param aperture: int, size of the aperture (pixels)
    :param x0: x position of the target (pixels)
    :param y0: y position of the target (pixels)
    :param filter: if True, apply a Gaussian filter with standard deviation sigma_filter to the image before
     performing PSF photometry
    :param sigma_filter: standard deviation of the Gaussian filter applied to the image
    :return: fitted x, fitted y, and fitted flux columns of the photometry table
    """
    if filter:
        image = ndimage.gaussian_filter(img, sigma=sigma_filter, order=0)
    else:
        image = img
    bkgrms = MADStdBackgroundRMS()
    std = bkgrms(image[image != 0])
    iraffind = IRAFStarFinder(threshold=2 * std,
                              fwhm=sigma_psf * gaussian_sigma_to_fwhm,
                              minsep_fwhm=0.01,
                              roundhi=5.0,
                              roundlo=-5.0,
                              sharplo=0.0,
                              sharphi=2.0)
    daogroup = DAOGroup(2.0 * sigma_psf * gaussian_sigma_to_fwhm)
    mmm_bkg = MMMBackground()
    fitter = LevMarLSQFitter()
    psf_model = IntegratedGaussianPRF(sigma=sigma_psf)
    if x0 is not None and y0 is not None:
        pos = Table(names=['x_0', 'y_0'], data=[[x0], [y0]])
        photometry = BasicPSFPhotometry(group_maker=daogroup,
                                        bkg_estimator=mmm_bkg,
                                        psf_model=psf_model,
                                        fitter=fitter,
                                        fitshape=(11, 11))
        res = photometry(image=image, init_guesses=pos)
        return res['x_fit'], res['y_fit'], res['flux_fit']
    photometry = BasicPSFPhotometry(finder=iraffind,
                                    group_maker=daogroup,
                                    bkg_estimator=mmm_bkg,
                                    psf_model=psf_model,
                                    fitter=fitter,
                                    fitshape=(11, 11),
                                    aperture_radius=aperture)
    res = photometry(image=image)
    return res['x_fit'], res['y_fit'], res['flux_fit']
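A hedged call sketch showing the two modes of astropy_psf_photometry above; img is assumed to be a 2D image array and the coordinates are made up:

# forced photometry at a known target position
x_fit, y_fit, flux = astropy_psf_photometry(img, sigma_psf=2.0, x0=128.4, y0=131.0)

# blind extraction: let IRAFStarFinder locate the sources
xs, ys, fluxes = astropy_psf_photometry(img, sigma_psf=2.0, aperture=3)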
Example #7
    def __init__(self,
                 psfimfilename,
                 targetpixscale,
                 psfpixscale=0.03,
                 x_0=None,
                 y_0=None,
                 psf_recenter=False,
                 fix_target_pos=True,
                 fwhm_arcsec=_HST_WFC3_PSF_FWHM_ARCSEC):
        """ 
        :param psfimfilename: full path for a .fits image file
        :param targetpixscale: arcseconds per pixel of the target image        
        :param psfpixscale: arcseconds per pixel of the psf image (if not 
        provided in the image header). Defaults to 0.03" per pixel
        :param x_0, y_0: pixel coordinates of the star. If not
        provided, the star is assumed to be at the center of the image.
        :param psf_recenter: execute a centroiding algorithm to locate the
        center of the psf
        :param fix_target_pos: execute "forced photometry" with the center of 
        the target fixed, and only the psf flux scaling as a free parameter.
        """
        psfimage = fits.open(psfimfilename)
        self.header = psfimage[0].header
        self.psfimdat = psfimage[0].data

        # TODO : check the header for pixel scale info

        if x_0 is None:
            # numpy images are indexed (y, x), so shape[1] is the x size
            self.xpsf = self.psfimdat.shape[1] / 2.
            self.ypsf = self.psfimdat.shape[0] / 2.
        else:
            self.xpsf, self.ypsf = x_0, y_0

        if psf_recenter:
            self.xpsf, self.ypsf = centroid(self.psfimdat, self.xpsf, self.ypsf)

        self.psfmodel = photutils.psf.models.FittableImageModel(
            self.psfimdat,
            x_0=self.xpsf,
            y_0=self.ypsf,
            oversampling=targetpixscale / psfpixscale)

        # Fix the center of the psf for "forced photometry" -- no recentering
        if fix_target_pos:
            self.psfmodel.x_0.fixed = True
            self.psfmodel.y_0.fixed = True
        else:
            self.psfmodel.x_0.fixed = False
            self.psfmodel.y_0.fixed = False

        # Set up the grouper, background estimator, and fitter objects:
        self.grouper = DAOGroup(2.0 * fwhm_arcsec * psfpixscale)
        self.bkg_estimator = MMMBackground()
        self.fitter = LevMarLSQFitter()
Example #8
    def run(self, stars_coords):
        stack_path = self.fits_explorer.get("stack")[0]
        stack_fwhm = np.mean(self.fwhm_fit.run(fits.getdata(stack_path), stars_coords)[0:2])

        print("{} global psf FWHM: {:.2f} (pixels)".format(INFO_LABEL, np.mean(stack_fwhm)))

        n_stars = np.shape(stars_coords)[0]
        n_images = len(self.files)

        fluxes = np.zeros((n_stars, n_images))

        pos = Table(
            names=["x_0", "y_0"], data=[stars_coords[:, 0], stars_coords[:, 1]]
        )

        daogroup = DAOGroup(2.0 * stack_fwhm * gaussian_sigma_to_fwhm)

        mmm_bkg = MMMBackground()

        psf_model = IntegratedGaussianPRF(sigma=stack_fwhm)
        psf_model.sigma.fixed = False

        sky = []

        psf_model.x_0.fixed = True
        psf_model.y_0.fixed = True

        photometry = BasicPSFPhotometry(
            group_maker=daogroup,
            bkg_estimator=mmm_bkg,
            psf_model=psf_model,
            fitter=LevMarLSQFitter(),
            fitshape=(17, 17)
        )

        for i, image in enumerate(
                tqdm(
                    self.files[0::],
                    desc="Photometry extraction",
                    unit="files",
                    ncols=80,
                    bar_format=TQDM_BAR_FORMAT,
                )
        ):
            image = fits.getdata(image)

            result_tab = photometry(image=image, init_guesses=pos)

            fluxes[:, i] = result_tab["flux_fit"]
            sky.append(1)

        return fluxes, np.ones_like(fluxes), {"sky": sky}
Example #9
    def __init__(self, fwhm, **kwargs):
        super().__init__(**kwargs)

        daogroup = DAOGroup(2.0 * fwhm * gaussian_sigma_to_fwhm)
        mmm_bkg = MMMBackground()
        psf_model = IntegratedGaussianPRF(sigma=fwhm)
        psf_model.sigma.fixed = False
        psf_model.x_0.fixed = True
        psf_model.y_0.fixed = True

        self.photometry = BasicPSFPhotometry(group_maker=daogroup,
                                             bkg_estimator=mmm_bkg,
                                             psf_model=psf_model,
                                             fitter=LevMarLSQFitter(),
                                             fitshape=(17, 17))
Example #10
def do_photometry_basic(image: np.ndarray,
                        σ_psf: float) -> Tuple[Table, np.ndarray]:
    """
    Find stars in an image with IRAFStarFinder

    :param image: The image data you want to find stars in
    :param σ_psf: expected standard deviation of the PSF
    :return: tuple result table, residual image
    """
    bkgrms = MADStdBackgroundRMS()

    std = bkgrms(image)

    iraffind = IRAFStarFinder(threshold=3 * std,
                              sigma_radius=σ_psf,
                              fwhm=σ_psf * gaussian_sigma_to_fwhm,
                              minsep_fwhm=2,
                              roundhi=5.0,
                              roundlo=-5.0,
                              sharplo=0.0,
                              sharphi=2.0)
    daogroup = DAOGroup(0.1 * σ_psf * gaussian_sigma_to_fwhm)

    mmm_bkg = MMMBackground()

    # my_psf = AiryDisk2D(x_0=0., y_0=0.,radius=airy_minimum)
    # psf_model = prepare_psf_model(my_psf, xname='x_0', yname='y_0', fluxname='amplitude',renormalize_psf=False)
    psf_model = IntegratedGaussianPRF(sigma=σ_psf)
    # psf_model = AiryDisk2D(radius = airy_minimum)#prepare_psf_model(AiryDisk2D,xname ="x_0",yname="y_0")
    # psf_model = Moffat2D([amplitude, x_0, y_0, gamma, alpha])

    # photometry = IterativelySubtractedPSFPhotometry(finder=iraffind, group_maker=daogroup,
    #                                                bkg_estimator=mmm_bkg, psf_model=psf_model,
    #                                                fitter=LevMarLSQFitter(),
    #                                                niters=2, fitshape=(11,11))
    photometry = BasicPSFPhotometry(finder=iraffind,
                                    group_maker=daogroup,
                                    bkg_estimator=mmm_bkg,
                                    psf_model=psf_model,
                                    fitter=LevMarLSQFitter(),
                                    aperture_radius=11.0,
                                    fitshape=(11, 11))

    result_table = photometry.do_photometry(image)
    return result_table, photometry.get_residual_image()
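A minimal usage sketch for do_photometry_basic above; the file name and PSF sigma are assumptions:

from astropy.io import fits

image = fits.getdata('field.fits').astype(float)
result_table, residual = do_photometry_basic(image, 2.0)
print(result_table['x_fit', 'y_fit', 'flux_fit'])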
Example #11
    def __init__(self, imfit_io, config_file_or_dict={}):

        if isinstance(config_file_or_dict, str):
            logger.info(f'Loading config file {config_file_or_dict}')
            config = load_config(config_file_or_dict)
        else:
            config = config_file_or_dict.copy()

        self.input_config = config.copy()
        self.imfit_io = config.pop('imfit_io', imfit_io)
        self.sersic_fitter = SersicFitter(out_dir=self.imfit_io)
        self.use_hsc_bright_mask = config.pop('use_hsc_bright_mask',
                                              dict(phot=False, imfit=True))
        self.residual_image_forced = None
        self.residual_image = None

        # daofinder parameters
        self.threshold = config.pop('threshold', 3.0)
        self.daofinder_opt = dict(
            sigma_radius=config.pop('sigma_radius', 3.0),
            sharphi=config.pop('sharphi', 2.0),
            sharplo=config.pop('sharplo', 0.),
            roundlo=config.pop('roundlo', -1.0),
            roundhi=config.pop('roundhi', 1.0),
        )

        # daogroup parameter
        self.crit_separation = config.pop('crit_separation', 1.5)

        # TODO: make these bkgrd methods options
        self.bkg = MMMBackground()
        self.bkgrms = MADStdBackgroundRMS()

        # phot parameters
        self.aperture_radius = config.pop('aperture_radius', 1.0)
        self.phot_opts = dict(
            fitshape=config.pop('fitshape', (15, 15)),
            niters=config.pop('niters', 3),
            bkg_estimator=self.bkg,
        )

        self.master_band = config.pop('master_band', 'i')
        self.max_match_sep = config.pop('max_match_sep', 1.0)
        self.min_match_bands = config.pop('min_match_bands', 4)
Example #12
def init_setup():

    fitimage = fits.open('Serpens3/idxq28010_drz.fits')
    imdata = fitimage[1].data
    head = fitimage[0].header

    bkgrms = MADStdBackgroundRMS()
    std = bkgrms(imdata)
    mean = np.mean(imdata)
    sigma_psf = 2.0
    iraffind = IRAFStarFinder(threshold=3.5 * std,
                              fwhm=sigma_psf * gaussian_sigma_to_fwhm,
                              minsep_fwhm=0.01,
                              roundhi=5.0,
                              roundlo=-5.0,
                              sharplo=0.0,
                              sharphi=2.0)

    daogroup = DAOGroup(2.0 * sigma_psf * gaussian_sigma_to_fwhm)
    mmm_bkg = MMMBackground()

    return imdata, bkgrms, std, sigma_psf, iraffind, daogroup, mmm_bkg, mean
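One way the pieces returned by init_setup above can be assembled into a photometry object; the fitter, niters, and fitshape are assumptions rather than part of the original snippet:

from astropy.modeling.fitting import LevMarLSQFitter
from photutils.psf import IntegratedGaussianPRF, IterativelySubtractedPSFPhotometry

imdata, bkgrms, std, sigma_psf, iraffind, daogroup, mmm_bkg, mean = init_setup()

psf_model = IntegratedGaussianPRF(sigma=sigma_psf)
photometry = IterativelySubtractedPSFPhotometry(finder=iraffind,
                                                group_maker=daogroup,
                                                bkg_estimator=mmm_bkg,
                                                psf_model=psf_model,
                                                fitter=LevMarLSQFitter(),
                                                niters=2,
                                                fitshape=(11, 11))
result_tab = photometry(image=imdata)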
Example #13
    def _basic_psf_flux(self,
                        image,
                        fwhm,
                        x=None,
                        y=None,
                        return_residual_image=False):

        w, h = image.shape
        if x is None:
            x = w / 2
        if y is None:
            y = h / 2

        wfit = w if w % 2 == 1 else w - 1
        hfit = h if h % 2 == 1 else h - 1
        fitshape = (wfit, hfit)

        daogroup = DAOGroup(2.0 * fwhm)
        psf_model = IntegratedGaussianPRF(sigma=fwhm / 2.35)

        photometry = BasicPSFPhotometry(group_maker=daogroup,
                                        bkg_estimator=MMMBackground(),
                                        psf_model=psf_model,
                                        fitshape=fitshape)

        psf_model.x_0.fixed = True
        psf_model.y_0.fixed = True
        pos = Table(names=['x_0', 'y_0'], data=[[x], [y]])

        result = photometry(image=image, init_guesses=pos)
        flux = result["flux_fit"].data[0]

        self.results = result

        if return_residual_image:
            return flux, photometry.get_residual_image()
        else:
            return flux
Example #14
def onpress(event):
    if event.key == 'p':
        filename = d.get('file')
        #hdu=fits.open(filename)
        hdu = d.get_pyfits()
        data = hdu[0].data
        x = d.get('crosshair image')
        x, y = x.split()
        x, y = int(float(x)), int(float(y))

        new_image_halfwidth = int(entry1.get())
        aperture_radius = int(entry2.get())
        inner_sky_radius = int(entry3.get())
        outer_sky_radius = int(entry4.get())

        new_image = data[y - new_image_halfwidth:y + new_image_halfwidth,
                         x - new_image_halfwidth:x + new_image_halfwidth]

        x_grid = np.arange(x - new_image_halfwidth, x + new_image_halfwidth, 1)
        y_grid = np.arange(y - new_image_halfwidth, y + new_image_halfwidth, 1)
        x_grid, y_grid = np.meshgrid(x_grid, y_grid)
        guess = (np.amax(new_image) - new_image[0, 0], x, y, 3, 3, 0,
                 new_image[0, 0])
        popt, pcov = curve_fit(gauss2d, (x_grid, y_grid),
                               new_image.ravel(),
                               p0=guess)
        #print popt
        x = int(popt[1])
        y = int(popt[2])
        labeltext.set('X: ' + str(x))
        label2text.set('Y: ' + str(y))

        new_image = data[y - new_image_halfwidth:y + new_image_halfwidth,
                         x - new_image_halfwidth:x + new_image_halfwidth]
        # for artist in ax1.get_children():
        # 	if hasattr(artist,'get_label') and artist.get_label()=='centroid':
        # 		artist.remove()
        ax1.clear()
        ax1.matshow(np.flip(new_image, axis=0),
                    cmap='gray',
                    origin='upper',
                    clim=zscale(new_image),
                    zorder=0)
        ax1.scatter(
            [new_image_halfwidth + 1], [new_image_halfwidth - 1],
            marker='+',
            s=120,
            c='k',
            zorder=1
        )  #ax1.scatter([popt[1]-x+new_image_halfwidth],[popt[2]-y+new_image_halfwidth],marker='+',s=120,c='k',zorder=1)
        aperture_circle = plt.Circle((popt[1] - x + new_image_halfwidth,
                                      popt[2] - y + new_image_halfwidth),
                                     radius=aperture_radius,
                                     linewidth=3,
                                     color='hotpink',
                                     fill=False,
                                     lw=3,
                                     zorder=2)
        ax1.add_patch(aperture_circle)
        inner_sky = plt.Circle((popt[1] - x + new_image_halfwidth,
                                popt[2] - y + new_image_halfwidth),
                               radius=inner_sky_radius,
                               linewidth=3,
                               color='lime',
                               fill=False,
                               zorder=2)
        ax1.add_patch(inner_sky)
        outer_sky = plt.Circle((popt[1] - x + new_image_halfwidth,
                                popt[2] - y + new_image_halfwidth),
                               radius=outer_sky_radius,
                               linewidth=3,
                               color='red',
                               fill=False,
                               zorder=2)
        ax1.add_patch(outer_sky)
        canvas.draw()

        #update the radial plot
        ax2.clear()
        radial = radial_profile(new_image,
                                [new_image_halfwidth, new_image_halfwidth])
        #perform the aperture photometry
        #currently 2% different from IDL atv
        aperture = CircularAperture((x, y), r=aperture_radius)
        annulus = CircularAnnulus((x, y),
                                  r_in=inner_sky_radius,
                                  r_out=outer_sky_radius)
        phot_table = aperture_photometry(data, [aperture, annulus])
        #new background estimation
        bkg_mask = np.ma.masked_outside(radial[0].reshape(new_image.shape),
                                        inner_sky_radius, outer_sky_radius)
        bkg_map = Background2D(new_image,
                               tuple(np.array(new_image.shape) / 4),
                               mask=bkg_mask.mask,
                               exclude_mesh_method='all')
        bkg_map_med = MMMBackground().calc_background(
            bkg_map.data)  #bkg_map_med=np.median(bkg_map.background)
        #print 'Map sky mean '+str(bkg_map_med)
        #bkg_mean=phot_table['aperture_sum_1']/annulus.area()
        #print 'Aperture sky mean '+str(bkg_mean)
        #phot_table['residual_aperture_sum']=phot_table['aperture_sum_0']-bkg_mean*aperture.area()
        phot_table['residual_aperture_sum'] = phot_table[
            'aperture_sum_0'] - bkg_map_med * aperture.area()
        #print 'Map sky result: '+str(phot_table['aperture_sum_0']-bkg_map_med*aperture.area())
        #print "Aperture Photometry Result: "+str(phot_table['residual_aperture_sum'])
        label8text.set('Sky Value: ' + str(int(bkg_map_med)))
        label9text.set('Aperture Counts: ' +
                       str(int(phot_table['residual_aperture_sum'][0])))
        label10text.set(
            'Mag: ' +
            str(-2.5 * np.log10(int(phot_table['residual_aperture_sum'][0])) +
                25.)[:5])

        ax2.scatter(radial[0], radial[1])
        if var10.get() == 1:
            ax2.plot(np.linspace(0, new_image_halfwidth, num=50),
                     gauss1d(np.linspace(0, new_image_halfwidth, num=50),
                             popt[0], 0, np.mean([popt[3], popt[4]]), popt[6]),
                     c='k',
                     lw=2)
            ax2.text(0.5,
                     0.93,
                     'Gaussian FWHM: ' +
                     str(2.35482 * np.mean([popt[3], popt[4]]))[:5],
                     transform=ax2.transAxes,
                     fontsize=int(15 * scaling))
        if var11.get() == 1:
            moffat1d_guess = (np.amax(new_image) - bkg_map_med, 0, 3, 1,
                              bkg_map_med)
            popt2, pcov2 = curve_fit(moffat1d,
                                     radial[0],
                                     radial[1],
                                     p0=moffat1d_guess)
            ax2.plot(np.linspace(0, new_image_halfwidth, num=50),
                     moffat1d(np.linspace(0, new_image_halfwidth,
                                          num=50), popt2[0], popt2[1],
                              popt2[2], popt2[3], popt2[4]),
                     c='r',
                     lw=2)
            ax2.text(
                0.5,
                0.85,
                'Moffat FWHM: ' +
                str(2.0 * popt2[2] * np.sqrt(2.0**(1. / popt2[3]) - 1.))[:5],
                transform=ax2.transAxes,
                fontsize=int(15 * scaling))
        ax2.grid(True, color='white', linestyle='-', linewidth=1)
        ax2.set_axisbelow(True)
        ax2.autoscale(False)
        ax2.set_xlim([0, new_image_halfwidth])
        ax2.set_xlabel('Radius (Pixels)')
        ax2.set_ylim([np.amin(radial[1]), np.amax(radial[1])])
        ax2.set_facecolor('0.85')
        ax2.axvline(aperture_radius, linewidth=2, color='hotpink')
        ax2.axvline(inner_sky_radius, linewidth=2, color='lime')
        ax2.axvline(outer_sky_radius, linewidth=2, color='red')
        ax2.axhline(bkg_map_med, linewidth=2, color='yellow')
        canvas2.draw()
Example #15
radial = radial_profile(new_image, [new_image_halfwidth, new_image_halfwidth])
#perform the aperture photometry
#currently 2% different from IDL atv
aperture = CircularAperture((x, y), r=aperture_radius)
annulus = CircularAnnulus((x, y),
                          r_in=inner_sky_radius,
                          r_out=outer_sky_radius)
phot_table = aperture_photometry(data, [aperture, annulus])
#new background estimation
bkg_mask = np.ma.masked_outside(radial[0].reshape(new_image.shape),
                                inner_sky_radius, outer_sky_radius)
bkg_map = Background2D(new_image,
                       tuple(np.array(new_image.shape) / 4),
                       mask=bkg_mask.mask,
                       exclude_mesh_method='all')
bkg_map_med = MMMBackground().calc_background(
    bkg_map.data)  #bkg_map_med=np.median(bkg_map.background)
#print 'Map sky mean '+str(bkg_map_med)
#bkg_mean=phot_table['aperture_sum_1']/annulus.area()
#print 'Aperture sky mean '+str(bkg_mean)
#phot_table['residual_aperture_sum']=phot_table['aperture_sum_0']-bkg_mean*aperture.area()
phot_table['residual_aperture_sum'] = phot_table[
    'aperture_sum_0'] - bkg_map_med * aperture.area()
#print 'Map sky result: '+str(phot_table['aperture_sum_0']-bkg_map_med*aperture.area())
#print "Aperture Photometry Result: "+str(phot_table['residual_aperture_sum'])
fig2 = Figure(figsize=(int(6 * scaling), int(6 * scaling)))
fig2.set_facecolor('0.85')
ax2 = fig2.add_axes([0.1, 0.1, .9, .9])
ax2.grid(True, color='white', linestyle='-', linewidth=1)
ax2.set_axisbelow(True)
ax2.scatter(radial[0], radial[1])
ax2.plot(np.linspace(0, new_image_halfwidth, num=50),
Example #16
with fits.open(
        r'/Users/phystastical/Desktop/BB2/script/twomass-j.fits') as hdul:
    image = hdul[0].data

bkgrms = MADStdBackgroundRMS()
std = bkgrms(image)

iraffind = IRAFStarFinder(threshold=3.5 * std,
                          fwhm=sigma_psf * gaussian_sigma_to_fwhm,
                          minsep_fwhm=0.01,
                          roundhi=5.0,
                          roundlo=-5.0,
                          sharplo=0.0,
                          sharphi=2.0)
daogroup = DAOGroup(2.0 * sigma_psf * gaussian_sigma_to_fwhm)

mmm_bkg = MMMBackground()
fitter = LevMarLSQFitter()
psf_model = IntegratedGaussianPRF(sigma=sigma_psf)
photometry = IterativelySubtractedPSFPhotometry(finder=iraffind,
                                                group_maker=daogroup,
                                                bkg_estimator=mmm_bkg,
                                                psf_model=psf_model,
                                                fitter=fitter,
                                                niters=1,
                                                fitshape=(11, 11))
result_tab = photometry(image=image)
residual_image = photometry.get_residual_image()

#Plot images made#
plt.subplot(1, 2, 1)
plt.imshow(image,
Example #17
image = fits.getdata('./cutout_BS90_V.fits').astype(np.float64)

epsf_file = 'epsf.pkl'

if not os.path.exists(epsf_file):
    mean, median, std = sigma_clipped_stats(image)
    threshold = median + 10 * std

    finder = IRAFStarFinder(threshold=threshold, fwhm=4, minsep_fwhm=5, peakmax=image.max() / 0.8)

    star_table = finder(image)
    star_table.rename_columns(('xcentroid', 'ycentroid'),('x','y'))

    sigma_clip = SigmaClip(sigma=5.0)
    bkg_estimator = MMMBackground()
    bkg = Background2D(image, 5, filter_size=(3, 3),
                       sigma_clip=sigma_clip, bkg_estimator=bkg_estimator)

    nddata = NDData(image-bkg.background)
    stars = extract_stars(nddata, star_table, size=51)

    epsf, fitted_stars = EPSFBuilder(oversampling=4, maxiters=3, progress_bar=True, smoothing_kernel='quadratic')(stars)
    epsf_model = prepare_psf_model(epsf, renormalize_psf=False)

    with open(epsf_file,'wb') as f:
        pickle.dump([epsf_model, finder], f)
else:
    with open(epsf_file, 'rb') as f:
        epsf_model, finder = pickle.load(f)
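A possible continuation of the snippet above that feeds the prepared ePSF model and finder into BasicPSFPhotometry; the grouping radius, aperture radius, and fit shape are assumptions:

from astropy.modeling.fitting import LevMarLSQFitter
from photutils.background import MMMBackground
from photutils.psf import BasicPSFPhotometry, DAOGroup

photometry = BasicPSFPhotometry(group_maker=DAOGroup(8.0),   # assumed ~2x the stellar FWHM in pixels
                                bkg_estimator=MMMBackground(),
                                psf_model=epsf_model,
                                fitter=LevMarLSQFitter(),
                                finder=finder,
                                fitshape=(31, 31),
                                aperture_radius=10)
result_tab = photometry(image=image)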
Example #18
def basic_psf_phot(target, centroided_sources, plots=False):
    def hmsm_to_days(hour=0,min=0,sec=0,micro=0):
        """
        Convert hours, minutes, seconds, and microseconds to fractional days.
        
        """
        days = sec + (micro / 1.e6)
        days = min + (days / 60.)
        days = hour + (days / 60.)
        return days / 24.
    
    def date_to_jd(year,month,day):
        """
        Convert a date to Julian Day.
        
        Algorithm from 'Practical Astronomy with your Calculator or Spreadsheet', 
            4th ed., Duffet-Smith and Zwart, 2011.
        
        """
        if month == 1 or month == 2:
            yearp = year - 1
            monthp = month + 12
        else:
            yearp = year
            monthp = month
        
        # this checks where we are in relation to October 15, 1582, the beginning
        # of the Gregorian calendar.
        if ((year < 1582) or
            (year == 1582 and month < 10) or
            (year == 1582 and month == 10 and day < 15)):
            # before start of Gregorian calendar
            B = 0
        else:
            # after start of Gregorian calendar
            A = math.trunc(yearp / 100.)
            B = 2 - A + math.trunc(A / 4.)
            
        if yearp < 0:
            C = math.trunc((365.25 * yearp) - 0.75)
        else:
            C = math.trunc(365.25 * yearp)
            
        D = math.trunc(30.6001 * (monthp + 1))
        
        jd = B + C + D + day + 1720994.5
        
        return jd

    def gaussian(p, x, y):
        height, center_x, center_y, width_x, width_y, rotation = p
        rotation = np.deg2rad(rotation)
        x_0 = center_x * np.cos(rotation) - center_y * np.sin(rotation)
        y_0 = center_x * np.sin(rotation) + center_y * np.cos(rotation)

        def rotgauss(x,y):
            xp = x * np.cos(rotation) - y * np.sin(rotation)
            yp = x * np.sin(rotation) + y * np.cos(rotation)
            g = height*np.exp(
                -(((x_0-xp)/width_x)**2+
                  ((y_0-yp)/width_y)**2)/2.)
            return g
        
        g = rotgauss(x,y)

        return g

    def moments(data):
        total = np.nansum(data)
        X, Y = np.indices(data.shape)
        center_x = int(np.shape(data)[1]/2)
        center_y = int(np.shape(data)[0]/2)
        row = data[int(center_x), :]
        col = data[:, int(center_y)]
        width_x = np.nansum(np.sqrt(abs((np.arange(col.size)-center_y)**2*col))
                            /np.nansum(col))
        width_y = np.nansum(np.sqrt(abs((np.arange(row.size)-center_x)**2*row))
                            /np.nansum(row))
        height = np.nanmax(data)
        rotation = 0.0
        return height, center_x, center_y, width_x, width_y, rotation

    def errorfunction(p, x, y, data):
        return gaussian(p, x, y) - data

    def fitgaussian(data):
        params = moments(data)
        X, Y = np.indices(data.shape)
        mask = ~np.isnan(data)
        x = X[mask]
        y = Y[mask]
        data = data[mask]
        p, success = optimize.leastsq(errorfunction, params, args=(x, y, data))
        return p

    pines_path = pines_dir_check()
    short_name = short_name_creator(target)
    reduced_path = pines_path/('Objects/'+short_name+'/reduced/')
    reduced_files = np.array(natsort.natsorted([x for x in reduced_path.glob('*.fits')]))

    centroided_sources.columns = centroided_sources.columns.str.strip()
    source_names = natsort.natsorted(list(set([i[0:-2].replace('X','').replace('Y','').rstrip().lstrip() for i in centroided_sources.keys()])))    

    #Declare a new dataframe to hold the information for all targets.
    columns = ['Filename', 'Time UT', 'Time JD', 'Airmass', 'Seeing']
    for i in range(0, len(source_names)):
        columns.append(source_names[i]+' Flux')
        columns.append(source_names[i]+' Flux Error')
    psf_df = pd.DataFrame(index=range(len(reduced_files)), columns=columns)
    output_filename = pines_path/('Objects/'+short_name+'/psf_phot/'+short_name+'_psf_phot.csv')

    for i in range(len(reduced_files)):
        #Read in image data/header. 
        file = reduced_files[i]
        data = fits.open(file)[0].data
        header = fits.open(file)[0].header
        print('{}, image {} of {}.'.format(file.name, i+1, len(reduced_files)))

        #Read in some supporting information.
        log_path = pines_path/('Logs/'+file.name.split('.')[0]+'_log.txt')
        log = pines_log_reader(log_path)
        date_obs = header['DATE-OBS']
        #Catch a case that can cause datetime strptime to crash; Mimir headers sometimes have DATE-OBS with seconds specified as 010.xx seconds, when it should be 10.xx seconds. 
        if len(date_obs.split(':')[-1].split('.')[0]) == 3:
            date_obs = date_obs.split(':')[0] + ':' + date_obs.split(':')[1] + ':' + date_obs.split(':')[-1][1:]
        #Keep a try/except clause here in case other unknown DATE-OBS formats pop up. 
        try:
            date = datetime.datetime.strptime(date_obs, '%Y-%m-%dT%H:%M:%S.%f')
        except:
            print('Header DATE-OBS format does not match the format code in strptime! Inspect/correct the DATE-OBS value.')
            pdb.set_trace()
        
        days = date.day + hmsm_to_days(date.hour,date.minute,date.second,date.microsecond)
        jd = date_to_jd(date.year,date.month,days)
        psf_df['Filename'][i] = file.name
        psf_df['Time UT'][i] = header['DATE-OBS']
        psf_df['Time JD'][i] = jd
        psf_df['Airmass'][i] = header['AIRMASS']
        psf_df['Seeing'][i] = log['X seeing'][np.where(log['Filename'] == file.name.split('_')[0]+'.fits')[0][0]]
        
        #Read in source centroids for this image
        x = np.zeros(len(source_names))
        y = np.zeros(len(source_names))
        seeing = psf_df['Seeing'][i]

        for j in range(len(source_names)):
            source = source_names[j]
            x[j] = centroided_sources[source+' X'][i]
            y[j] = centroided_sources[source+' Y'][i]

         #The extract_stars() function requires the input data as an NDData object. 
        nddata = NDData(data=data)  

        #Create table of good star positions
        stars_tbl = Table()
        stars_tbl['x'] = x
        stars_tbl['y'] = y

        size = 25
        x, y = np.meshgrid(np.arange(0,size), np.arange(0,size))

        #Extract star cutouts.
        stars = extract_stars(nddata, stars_tbl, size=size)  

        fitter = fitting.LevMarLSQFitter()

        fig, ax = plt.subplots(nrows=len(stars), ncols=3, sharex=True, sharey=True, figsize=(12,40))

        #Fit a 2D Gaussian to each star. 
        for j in range(len(stars)): 
            star = stars[j]
            source = source_names[j]
            mmm_bkg = MMMBackground()
            cutout = star.data - mmm_bkg(star.data)            

            #Get the star's centroid position in the cutout. 
            dtype = [('x_0', 'f8'), ('y_0', 'f8')]
            pos = Table(data=np.zeros(1, dtype=dtype))
            source_x = stars_tbl['x'][j]
            source_y = stars_tbl['y'][j]
            pos['x_0'] = source_x - int(source_x - size/2 + 1)
            pos['y_0'] = source_y - int(source_y - size/2 + 1)

            parameters = fitgaussian(cutout)
            g2d_fit = gaussian(parameters, x, y)

            avg, med, std = sigma_clipped_stats(cutout)
            im = ax[j,0].imshow(cutout, origin='lower', vmin=med-std, vmax=med+8*std)
            divider = make_axes_locatable(ax[j,0])
            cax = divider.append_axes('right', size='5%', pad=0.05)
            fig.colorbar(im, cax=cax, orientation='vertical')
            ax[j,0].plot(pos['x_0'], pos['y_0'], 'rx')
            ax[j,0].set_ylabel(source)
            ax[j,0].text(pos['x_0'], pos['y_0']+1, '('+str(np.round(source_x,1))+', '+str(np.round(source_y,1))+')', color='r', ha='center')
            ax[j,0].axis('off')

            axins = ax[j,0].inset_axes([0.75, 0.75, 0.25, 0.25])
            axins.set_yticklabels([])
            axins.set_yticks([])
            axins.set_xticklabels([])
            axins.set_xticks([])
            axins.imshow(data, origin='lower', vmin=med-std, vmax=med+8*std)
            axins.plot(source_x, source_y, 'rx')

            im = ax[j,1].imshow(g2d_fit, origin='lower', vmin=med-std, vmax=med+8*std)
            divider = make_axes_locatable(ax[j,1])
            cax = divider.append_axes('right', size='5%', pad=0.05)
            fig.colorbar(im, cax=cax, orientation='vertical')
            ax[j,1].axis('off')

            avg, med, std = sigma_clipped_stats(cutout - g2d_fit)
            im = ax[j,2].imshow(cutout - g2d_fit, origin='lower', vmin=med-std, vmax=med+8*std)
            divider = make_axes_locatable(ax[j,2])
            cax = divider.append_axes('right', size='5%', pad=0.05)
            fig.colorbar(im, cax=cax, orientation='vertical')
            ax[j,2].axis('off')

            if j == 0:
                ax[j,0].set_title('Data')
                ax[j,1].set_title('2D Gaussian Model')
                ax[j,2].set_title('Data - Model')

            plt.tight_layout()

        output_filename = pines_path/('Objects/'+short_name+'/basic_psf_phot/'+reduced_files[i].name.split('_')[0]+'_'+'source_modeling.pdf')
        plt.savefig(output_filename)
        plt.close()

        
    return
           
Example #19
def extractFlux(cnam, ccd, rccd, read, gain, ccdwin, rfile, store):
    """This extracts the flux of all apertures of a given CCD.

    The steps are (1) creation of PSF model, (2) PSF fitting, (3)
    flux extraction. The apertures are assumed to be correctly positioned.

    It returns the results as a dictionary keyed on the aperture label. Each
    entry returns a list:

    [x, ex, y, ey, fwhm, efwhm, beta, ebeta, counts, countse, sky, esky,
    nsky, nrej, flag]

    flag = bitmask. See hipercam.core to see all the options which are
    referred to by name in the code e.g. ALL_OK. The various flags can
    signal that there are no sky pixels (NO_SKY), the sky aperture was off
    the edge of the window (SKY_AT_EDGE), etc.

    This code::

       >> bset = flag & TARGET_SATURATED

    determines whether the data saturation flag is set for example.

    Arguments::

       cnam     : string
          CCD identifier label

       ccd       : CCD
           the debiassed, flat-fielded CCD.

       rccd : CCD
          corresponding raw CCD, used to work out whether data are
          saturated in target aperture.

       read      : CCD
           readnoise divided by the flat-field

       gain      : CCD
           gain multiplied by the flat field

       ccdwin   : dictionary of strings
           the Window label corresponding to each Aperture

       rfile     : Rfile
           reduce file configuration parameters

       store     : dict of dicts
           see moveApers for what this contains.

    """

    # initialise flag
    flag = hcam.ALL_OK

    ccdaper = rfile.aper[cnam]

    results = {}
    # get profile params from aperture store
    mfwhm = store["mfwhm"]
    mbeta = store["mbeta"]
    method = "m" if mbeta > 0.0 else "g"

    if mfwhm <= 0:
        # die hard, die soon as there's nothing we can do.
        print((" *** WARNING: CCD {:s}: no measured FWHM to create PSF model"
               "; no extraction possible").format(cnam))
        # set flag to indicate no FWHM
        flag = hcam.NO_FWHM

        for apnam, aper in ccdaper.items():
            info = store[apnam]
            results[apnam] = {
                "x": aper.x,
                "xe": info["xe"],
                "y": aper.y,
                "ye": info["ye"],
                "fwhm": info["fwhm"],
                "fwhme": info["fwhme"],
                "beta": info["beta"],
                "betae": info["betae"],
                "counts": 0.0,
                "countse": -1,
                "sky": 0.0,
                "skye": 0.0,
                "nsky": 0,
                "nrej": 0,
                "flag": flag,
            }
        return results

    # all apertures have to be in the same window, or we can't easily make a
    # postage stamp of the data
    wnames = set(ccdwin.values())
    if len(wnames) != 1:
        print((" *** WARNING: CCD {:s}: not all apertures"
               " lie within the same window; no extraction possible"
               ).format(cnam))

        # set flag to indicate no extraction
        flag = hcam.NO_EXTRACTION

        # return empty results
        for apnam, aper in ccdaper.items():
            info = store[apnam]
            results[apnam] = {
                "x": aper.x,
                "xe": info["xe"],
                "y": aper.y,
                "ye": info["ye"],
                "fwhm": info["fwhm"],
                "fwhme": info["fwhme"],
                "beta": info["beta"],
                "betae": info["betae"],
                "counts": 0.0,
                "countse": -1,
                "sky": 0.0,
                "skye": 0.0,
                "nsky": 0,
                "nrej": 0,
                "flag": flag,
            }
        return results
    wnam = wnames.pop()

    # PSF params are in binned pixels, so find binning
    bin_fac = ccd[wnam].xbin

    # create PSF model
    if method == "m":
        psf_model = MoffatPSF(beta=mbeta, fwhm=mfwhm / bin_fac)
    else:
        psf_model = IntegratedGaussianPRF(sigma=mfwhm *
                                          gaussian_fwhm_to_sigma / bin_fac)

    # force photometry only at aperture positions
    # this means PSF shape and positions are fixed, we are only fitting flux
    if rfile["psf_photom"]["positions"] == "fixed":
        psf_model.x_0.fixed = True
        psf_model.y_0.fixed = True

    # create instances for PSF photometry
    gfac = float(rfile["psf_photom"]["gfac"])
    sclip = float(rfile["sky"]["thresh"])
    daogroup = DAOGroup(gfac * mfwhm / bin_fac)
    mmm_bkg = MMMBackground(sigma_clip=SigmaClip(sclip))
    fitter = LevMarLSQFitter()
    fitshape_box_size = int(2 * int(rfile["psf_photom"]["fit_half_width"]) + 1)
    fitshape = (fitshape_box_size, fitshape_box_size)

    photometry_task = BasicPSFPhotometry(
        group_maker=daogroup,
        bkg_estimator=mmm_bkg,
        psf_model=psf_model,
        fitter=fitter,
        fitshape=fitshape,
    )

    # initialise flag
    flag = hcam.ALL_OK

    # extract Windows relevant for these apertures
    wdata = ccd[wnam]
    wraw = rccd[wnam]

    # extract sub-windows that include all of the apertures, plus a little
    # extra around the edges.
    x1 = min([ap.x - ap.rsky2 - wdata.xbin for ap in ccdaper.values()])
    x2 = max([ap.x + ap.rsky2 + wdata.xbin for ap in ccdaper.values()])
    y1 = min([ap.y - ap.rsky2 - wdata.ybin for ap in ccdaper.values()])
    y2 = max([ap.y + ap.rsky2 + wdata.ybin for ap in ccdaper.values()])

    # extract sub-Windows
    swdata = wdata.window(x1, x2, y1, y2)
    swraw = wraw.window(x1, x2, y1, y2)

    # compute pixel positions of apertures in windows
    xpos, ypos = zip(*((swdata.x_pixel(ap.x), swdata.y_pixel(ap.y))
                       for ap in ccdaper.values()))
    positions = Table(names=["x_0", "y_0"], data=(xpos, ypos))

    # do the PSF photometry
    photom_results = photometry_task(swdata.data, init_guesses=positions)
    slevel = mmm_bkg(swdata.data)

    # unpack the results and check apertures
    for apnam, aper in ccdaper.items():
        try:
            # reset flag
            flag = hcam.ALL_OK

            result_row = photom_results[photom_results["id"] == int(apnam)]
            if len(result_row) == 0:
                flag |= hcam.NO_DATA
                raise hcam.HipercamError(
                    "no source in PSF photometry for this aperture")
            elif len(result_row) > 1:
                flag |= hcam.NO_EXTRACTION
                raise hcam.HipercamError(
                    "ambiguous lookup for this aperture in PSF photometry")
            else:
                result_row = result_row[0]

            # compute X, Y arrays over the sub-window relative to the centre
            # of the aperture and the distance squared from the centre (Rsq)
            # to save a little effort.
            x = swdata.x(np.arange(swdata.nx)) - aper.x
            y = swdata.y(np.arange(swdata.ny)) - aper.y
            X, Y = np.meshgrid(x, y)
            Rsq = X**2 + Y**2

            # size of a pixel which is used to taper pixels as they approach
            # the edge of the aperture to reduce pixellation noise
            size = np.sqrt(wdata.xbin * wdata.ybin)

            # target selection, accounting for extra apertures and allowing
            # pixels to contribute if their centres are as far as size/2 beyond
            # the edge of the circle (but with a tapered weight)
            dok = Rsq < (aper.rtarg + size / 2.0)**2
            if not dok.any():
                # check there are some valid pixels
                flag |= hcam.NO_DATA
                raise hcam.HipercamError("no valid pixels in aperture")

            # check for saturation and nonlinearity
            if cnam in rfile.warn:
                if swraw.data[dok].max() >= rfile.warn[cnam]["saturation"]:
                    flag |= hcam.TARGET_SATURATED

                if swraw.data[dok].max() >= rfile.warn[cnam]["nonlinear"]:
                    flag |= hcam.TARGET_NONLINEAR
            else:
                warnings.warn(
                    "CCD {:s} has no nonlinearity or saturation levels set".format(cnam))

            counts = result_row["flux_fit"]
            countse = result_row["flux_unc"]
            info = store[apnam]

            results[apnam] = {
                "x": aper.x,
                "xe": info["xe"],
                "y": aper.y,
                "ye": info["ye"],
                "fwhm": info["fwhm"],
                "fwhme": info["fwhme"],
                "beta": info["beta"],
                "betae": info["betae"],
                "counts": counts,
                "countse": countse,
                "sky": slevel,
                "skye": 0,
                "nsky": 0,
                "nrej": 0,
                "flag": flag,
            }

        except hcam.HipercamError as err:

            info = store[apnam]
            flag |= hcam.NO_EXTRACTION

            results[apnam] = {
                "x": aper.x,
                "xe": info["xe"],
                "y": aper.y,
                "ye": info["ye"],
                "fwhm": info["fwhm"],
                "fwhme": info["fwhme"],
                "beta": info["beta"],
                "betae": info["betae"],
                "counts": 0.0,
                "countse": -1,
                "sky": 0.0,
                "skye": 0.0,
                "nsky": 0,
                "nrej": 0,
                "flag": flag,
            }

    # finally, we are done
    return results
Example #20
    def Flux(self,x,y):
        x = int(x)
        y = int(y)
        r = 25
        data = self.hdulist[self.fz].data[x-r:x+r,y-r:y+r]
        data = (lacosmic.lacosmic(data,2,10,10, effective_gain = self.gain, readnoise = self.readnoise))[0]
        bkgrms = MADStdBackgroundRMS()
        std = bkgrms(data)
        iraffind = IRAFStarFinder(threshold=self.limit*std,
                                   fwhm=self.sigma_psf*gaussian_sigma_to_fwhm,
                                   minsep_fwhm=0.01, roundhi=5.0, roundlo=-5.0,
                                   sharplo=0.0, sharphi=2.0)
        daogroup = DAOGroup(2.0*self.sigma_psf*gaussian_sigma_to_fwhm)
        mmm_bkg = MMMBackground()
        psf_model = IntegratedGaussianPRF(sigma=self.sigma_psf)
        from photutils.psf import IterativelySubtractedPSFPhotometry
        photometry = IterativelySubtractedPSFPhotometry(finder=iraffind,
                                                         group_maker=daogroup,
                                                         bkg_estimator=mmm_bkg,
                                                         psf_model=psf_model,
                                                         fitter=LevMarLSQFitter(),
                                                         niters=1, fitshape=(21,21))
        

        
        result_tab = photometry(image=data)   
        
        """
        if plot == 1:
            residual_image = photometry.get_residual_image()
            print(result_tab['x_fit','y_fit'])
            plt.figure(self.filename+' data')
            plt.imshow(data, cmap='viridis',
                       aspect=1, interpolation='nearest', origin='lower')
            plt.show()
            plt.figure(self.filename+' residual')
            plt.imshow(residual_image, cmap='viridis',
                       aspect=1, interpolation='nearest', origin='lower')
            plt.show()
            plt.figure(self.filename+' PSF')
            plt.imshow(data-residual_image, cmap='viridis',
                       aspect=1, interpolation='nearest', origin='lower')
            plt.show()
        """
        
        if len(result_tab) > 5:
            return(0,0) 
        if len(result_tab) ==0:
            print('None')
            return(0,0) 
        result_tab['Minus'] = np.zeros(len(result_tab))
        for i in range(len(result_tab)):
            if 18.5 < result_tab['x_fit'][i] < 28.5 and 18.5 < result_tab['y_fit'][i] < 28.5:
            #if 15 < result_tab['x_fit'][i] < 25 and 15 < result_tab['y_fit'][i] < 25:
                result_tab['Minus'][i] = 1
            else:
                result_tab['Minus'][i] = 0
        mask = result_tab['Minus'] == 1.0
        result_tab = result_tab[mask]
        if len(result_tab) != 1:
            return(0,0)   
        flux_counts = float(result_tab['flux_fit'][0])
        flux_unc = float(result_tab['flux_unc'][0])
        flux_unc = flux_unc/flux_counts
        return(flux_counts,flux_unc)
Example #21
def run_iterative_PSF_photometry(setup,
                                 reduction_metadata,
                                 image_path,
                                 log,
                                 diagnostics=False):
    """Function to perform PSF-fitting photometry to all objects detected
    in an image, using DAOphot-standard routines from photutils.
    """

    iterate = False

    log.info('Performing PSF-fitting photometry on ' +
             os.path.basename(image_path))

    image_data = fits.getdata(image_path)

    psf_size = reduction_metadata.reduction_parameters[1]['PSF_SIZE'][0]

    image_idx = reduction_metadata.images_stats[1]['IM_NAME'].tolist().index(
        os.path.basename(image_path))

    fwhm = reduction_metadata.images_stats[1]['FWHM_X'][image_idx]

    log.info('Applying psf size = ' + str(psf_size))
    log.info('         fwhm = ' + str(fwhm))

    psf_radius = psf_size * fwhm

    log.info('         psf size = ' + str(psf_radius))

    star_finder = starfind.build_star_finder(reduction_metadata, image_path,
                                             log)

    daogroup = DAOGroup(2.0 * fwhm)

    log.info(' -> Build star grouping object')

    sigma_clip = SigmaClip(sigma=3.0)

    mmm_bkg = MMMBackground(sigma_clip=sigma_clip)

    log.info(' -> Build sky background model')

    fitter = LevMarLSQFitter()

    psf_model = IntegratedGaussianPRF(sigma=fwhm)

    log.info(' -> Build PSF model')

    psf_x = calc_psf_dimensions(psf_size, fwhm, log)

    if iterate:
        photometer = IterativelySubtractedPSFPhotometry(finder=star_finder,
                                                        group_maker=daogroup,
                                                        bkg_estimator=mmm_bkg,
                                                        psf_model=psf_model,
                                                        fitter=fitter,
                                                        niters=3,
                                                        fitshape=(psf_x,
                                                                  psf_x))
    else:
        photometer = BasicPSFPhotometry(finder=star_finder,
                                        group_maker=daogroup,
                                        bkg_estimator=mmm_bkg,
                                        psf_model=psf_model,
                                        fitter=fitter,
                                        fitshape=(psf_x, psf_x))
    photometry = photometer(image=image_data)
    print(photometry.colnames)
    print(photometry['flux_unc'].data)

    log.info('Photometry warnings, if any: ' +
             repr(fitter.fit_info['message']))

    log.info('Executed photometry of ' + str(len(photometry)) + ' stars')

    if diagnostics:
        store_residual_image(setup, photometer, image_path, log)

    return photometry
Example #22
def daophot(cnam, ccd, xlo, xhi, ylo, yhi, niters, method, fwhm, beta, gfac,
            thresh, rejthresh):
    """
    Perform iterative PSF photometry and star finding on region of CCD
    """
    print(xlo, ylo, xhi, yhi)
    # first check we are within a single window
    wnam1 = ccd.inside(xlo, ylo, 2)
    wnam2 = ccd.inside(xhi, yhi, 2)
    if wnam1 != wnam2:
        raise hcam.HipercamError(
            'PSF photometry cannot currently be run across separate windows')
    wnam = wnam1
    print(wnam)
    # background stats from whole window
    # estimate background RMS
    wind = ccd[wnam]

    rms_func = MADStdBackgroundRMS(sigma_clip=SigmaClip(sigma=rejthresh))
    bkg_rms = rms_func(wind.data)
    bkg_func = MMMBackground(sigma_clip=SigmaClip(sigma=rejthresh))
    bkg = bkg_func(wind.data)
    print('  Background estimate = {}, BKG RMS = {}'.format(bkg, bkg_rms))

    # crop window to ROI
    wind = ccd[wnam].window(xlo, xhi, ylo, yhi)

    # correct FWHM for binning
    fwhm /= wind.xbin
    if method == 'm':
        psf_model = MoffatPSF(fwhm, beta)
        print('  FWHM = {:.1f}, BETA={:.1f}'.format(fwhm, beta))
    else:
        psf_model = IntegratedGaussianPRF(sigma=fwhm * gaussian_fwhm_to_sigma)
        print('  FWHM = {:.1f}'.format(fwhm))

    # region to extract around positions for fits
    fitshape = int(5 * fwhm)
    # ensure odd
    if fitshape % 2 == 0:
        fitshape += 1

    photometry_task = DAOPhotPSFPhotometry(gfac * fwhm,
                                           thresh * bkg_rms,
                                           fwhm,
                                           psf_model,
                                           fitshape,
                                           niters=niters,
                                           sigma=rejthresh)

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        results = photometry_task(wind.data - bkg)

    # filter out junk fits
    tiny = 1e-30
    bad_errs = (results['flux_unc'] < tiny) | (results['x_0_unc'] < tiny) | (
        results['y_0_unc'] < tiny)
    results = results[~bad_errs]

    results.write('table_{}.fits'.format(cnam))
    print('  found {} stars'.format(len(results)))

    xlocs, ylocs = results['x_fit'], results['y_fit']

    # convert to device coordinates
    xlocs = wind.x(xlocs)
    ylocs = wind.y(ylocs)
    return xlocs, ylocs
Пример #23
0
def epsf_phot(target, centroided_sources, plots=False):
    def hmsm_to_days(hour=0,min=0,sec=0,micro=0):
        """
        Convert hours, minutes, seconds, and microseconds to fractional days.
        
        """
        days = sec + (micro / 1.e6)
        days = min + (days / 60.)
        days = hour + (days / 60.)
        return days / 24.
    
    def date_to_jd(year,month,day):
        """
        Convert a date to Julian Day.
        
        Algorithm from 'Practical Astronomy with your Calculator or Spreadsheet', 
            4th ed., Duffet-Smith and Zwart, 2011.
        
        """
        if month == 1 or month == 2:
            yearp = year - 1
            monthp = month + 12
        else:
            yearp = year
            monthp = month
        
        # this checks where we are in relation to October 15, 1582, the beginning
        # of the Gregorian calendar.
        if ((year < 1582) or
            (year == 1582 and month < 10) or
            (year == 1582 and month == 10 and day < 15)):
            # before start of Gregorian calendar
            B = 0
        else:
            # after start of Gregorian calendar
            A = math.trunc(yearp / 100.)
            B = 2 - A + math.trunc(A / 4.)
            
        if yearp < 0:
            C = math.trunc((365.25 * yearp) - 0.75)
        else:
            C = math.trunc(365.25 * yearp)
            
        D = math.trunc(30.6001 * (monthp + 1))
        
        jd = B + C + D + day + 1720994.5
        
        return jd
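    # Quick sanity check of the two helpers above (hypothetical date, worked by hand):
    # 2015-06-01 12:00:00 UT gives hmsm_to_days(hour=12) = 0.5, and
    # date_to_jd(2015, 6, 1 + 0.5) = 2457175.0, the expected Julian Date.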

    pines_path = pines_dir_check()
    short_name = short_name_creator(target)
    reduced_path = pines_path/('Objects/'+short_name+'/reduced/')
    reduced_filenames = natsort.natsorted([x.name for x in reduced_path.glob('*.fits')])
    reduced_files = np.array([reduced_path/i for i in reduced_filenames])

    centroided_sources.columns = centroided_sources.columns.str.strip()
    source_names = natsort.natsorted(list(set([i.split(' ')[0]+' '+i.split(' ')[1] for i in centroided_sources.keys() if (i[0] == '2') or (i[0] == 'R')])))
    
    #Create output plot directories for each source.
    if plots:
        for name in source_names:
            #If the folders are already there, delete them. 
            source_path = (pines_path/('Objects/'+short_name+'/psf_phot/'+name+'/'))
            if source_path.exists():
                shutil.rmtree(source_path)
            #Create folders.
            os.mkdir(source_path)

    #Declare a new dataframe to hold the information for all targets.
    columns = ['Filename', 'Time UT', 'Time JD', 'Airmass', 'Seeing']
    for i in range(0, len(source_names)):
        columns.append(source_names[i]+' Flux')
        columns.append(source_names[i]+' Flux Error')
    psf_df = pd.DataFrame(index=range(len(reduced_files)), columns=columns)
    output_filename = pines_path/('Objects/'+short_name+'/psf_phot/'+short_name+'_psf_phot.csv')

    for i in range(0, len(reduced_files)):
        #Read in image data/header. 
        file = reduced_files[i]
        data = fits.open(file)[0].data
        header = fits.open(file)[0].header
        print('{}, image {} of {}.'.format(file.name, i+1, len(reduced_files)))

        #Read in some supporting information.
        log_path = pines_path/('Logs/'+file.name.split('.')[0]+'_log.txt')
        log = pines_log_reader(log_path)
        date_obs = header['DATE-OBS']
        #Catch a case that can cause datetime strptime to crash; Mimir headers sometimes have DATE-OBS with seconds specified as 010.xx seconds, when it should be 10.xx seconds. 
        if len(date_obs.split(':')[-1].split('.')[0]) == 3:
            date_obs = date_obs.split(':')[0] + ':' + date_obs.split(':')[1] + ':' + date_obs.split(':')[-1][1:]
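        #For example, a hypothetical '2020-10-05T04:23:010.53' becomes '2020-10-05T04:23:10.53'.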
        #Keep a try/except clause here in case other unknown DATE-OBS formats pop up. 
        try:
            date = datetime.datetime.strptime(date_obs, '%Y-%m-%dT%H:%M:%S.%f')
        except:
            print('Header DATE-OBS format does not match the format code in strptime! Inspect/correct the DATE-OBS value.')
            pdb.set_trace()
        
        days = date.day + hmsm_to_days(date.hour,date.minute,date.second,date.microsecond)
        jd = date_to_jd(date.year,date.month,days)
        psf_df['Filename'][i] = file.name
        psf_df['Time UT'][i] = header['DATE-OBS']
        psf_df['Time JD'][i] = jd
        psf_df['Airmass'][i] = header['AIRMASS']
        psf_df['Seeing'][i] = log['X seeing'][np.where(log['Filename'] == file.name.split('_')[0]+'.fits')[0][0]]
        
        #Read in source centroids for this image
        x = np.zeros(len(source_names))
        y = np.zeros(len(source_names))
        for j in range(len(source_names)):
            source = source_names[j]
            x[j] = centroided_sources[source+' X'][i]
            y[j] = centroided_sources[source+' Y'][i]

        #Extract pixel cutouts of our stars; explicitly exclude stars that are too close to the image boundaries, since they cannot be extracted.
        size = 13
        hsize = (size - 1) / 2
        #mask = ((x > hsize) & (x < (data.shape[1] -1 - hsize)) & (y > hsize) & (y < (data.shape[0] -1 - hsize)) & (y > 100) & (y < 923))

        #Create table of good star positions
        stars_tbl = Table()
        stars_tbl['x'] = x
        stars_tbl['y'] = y
        
        #Subtract background (star cutouts from which we build the ePSF must have background subtracted).
        mean_val, median_val, std_val = sigma_clipped_stats(data, sigma=2.)  
        data -= median_val
        
        #Replace nans in data using Gaussian. 
        # kernel = Gaussian2DKernel(x_stddev=0.5)
        # data = interpolate_replace_nans(data, kernel)

        #The extract_stars() function requires the input data as an NDData object. 
        nddata = NDData(data=data)  

        #Extract star cutouts.
        stars = extract_stars(nddata, stars_tbl, size=size)  
                        

        #Plot. 
        nrows = 5
        ncols = 5
        fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(10, 10), squeeze=True)
        ax = ax.ravel()
        for j in range(len(stars)):           
            norm = simple_norm(stars[j], 'log', percent=99.)
            ax[j].imshow(stars[j].data, norm=norm, origin='lower', cmap='viridis')

        pdb.set_trace()

        #Construct the ePSF using the star cutouts.
        epsf_fitter = EPSFFitter()
        epsf_builder = EPSFBuilder(maxiters=4, progress_bar=False, fitter=epsf_fitter)   

        try:
            epsf, fitted_stars = epsf_builder(stars)
            output_filename = pines_path/('Objects/'+short_name+'/psf_phot/'+short_name+'_psf_phot.csv')

            for j in range(len(stars)):
                star = stars[j]
                source_name = source_names[j]
                sigma_psf = 1.85

                dtype = [('x_0', 'f8'), ('y_0', 'f8')]
                pos = Table(data=np.zeros(1, dtype=dtype))
                source_x = stars_tbl['x'][j]
                source_y = stars_tbl['y'][j]
                pos['x_0'] = source_x - int(source_x - size/2 + 1)
                pos['y_0'] = source_y - int(source_y - size/2 + 1)
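                #e.g., with a hypothetical source_x = 245.7 and size = 13, int(245.7 - 6.5 + 1) = 240,
                #so x_0 is about 5.7: the star's position within the 13-pixel cutout.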

                daogroup = DAOGroup(4.0*sigma_psf*gaussian_sigma_to_fwhm)
                mmm_bkg = MMMBackground()
                photometry = BasicPSFPhotometry(group_maker=daogroup,
                                    bkg_estimator=mmm_bkg,
                                    psf_model=epsf,
                                    fitter=LevMarLSQFitter(),
                                    fitshape=(13,13),
                                    aperture_radius=4.)
                

                result_tab = photometry(image=star, init_guesses=pos)
                residual_image = photometry.get_residual_image()
                psf_df[source_name+' Flux'][i] = result_tab['flux_fit'][0]
                psf_df[source_name+' Flux Error'][i] = result_tab['flux_unc'][0]

                if plots:
                    fig, ax = plt.subplots(nrows=1, ncols=3, figsize=(12,4))
                    im = ax[0].imshow(star, origin='lower')
                    divider = make_axes_locatable(ax[0])
                    cax = divider.append_axes('right', size='5%', pad=0.05)
                    fig.colorbar(im, cax=cax, orientation='vertical')
                    ax[0].plot(result_tab['x_fit'][0], result_tab['y_fit'][0], 'rx')
                    ax[0].set_title('Data')

                    im2 = ax[1].imshow(epsf.data, origin='lower')
                    ax[1].set_title('EPSF Model')
                    divider = make_axes_locatable(ax[1])
                    cax = divider.append_axes('right', size='5%', pad=0.05)
                    fig.colorbar(im2, cax=cax, orientation='vertical')

                    im3 = ax[2].imshow(residual_image, origin='lower')
                    ax[2].set_title('Residual Image')
                    divider = make_axes_locatable(ax[2])
                    cax = divider.append_axes('right', size='5%', pad=0.05)
                    fig.colorbar(im3, cax=cax, orientation='vertical')
                    plt.suptitle(source_name+'\n'+reduced_files[i].name+', image '+str(i+1)+' of '+str(len(reduced_files)))
                    plt.subplots_adjust(wspace=0.5, top=0.95, bottom = 0.05)
                    plot_output_name = pines_path/('Objects/'+short_name+'/psf_phot/'+source_name+'/'+str(i).zfill(4)+'.jpg')
                    plt.savefig(plot_output_name)
                    plt.close()
        except:
            print('')
            print('EPSF BUILDER FAILED, SKIPPING IMAGE.')
            print('')
        #Plot the ePSF. 
        # plt.figure()
        # norm = simple_norm(epsf.data, 'log', percent=99.)
        # plt.imshow(epsf.data, norm=norm, origin='lower', cmap='viridis')
        # cb = plt.colorbar()
        # plt.tight_layout()   

        

    print('Saving psf photometry output to {}.'.format(output_filename))
    with open(output_filename, 'w') as f:
        for j in range(len(psf_df)):
            if j == 0:
                f.write('{:>21s}, {:>22s}, {:>17s}, {:>7s}, {:>7s}, '.format('Filename', 'Time UT', 'Time JD', 'Airmass', 'Seeing'))
                for i in range(len(source_names)):
                    if i != len(source_names) - 1:
                        f.write('{:>20s}, {:>26s}, '.format(source_names[i]+' Flux', source_names[i]+' Flux Error'))
                    else:
                        f.write('{:>20s}, {:>26s}\n'.format(source_names[i]+' Flux', source_names[i]+' Flux Error'))

            format_string = '{:21s}, {:22s}, {:17.9f}, {:7.2f}, {:7.1f}, '

            #If the seeing value for this image is 'nan' (a string), convert it to a float. 
            #TODO: Not sure why it's being read in as a string, fix that. 
            if type(psf_df['Seeing'][j]) == str:
                psf_df['Seeing'][j] = float(psf_df['Seeing'][j])

            #Do a try/except clause for writeout, in case it breaks in the future. 
            try:
                f.write(format_string.format(psf_df['Filename'][j], psf_df['Time UT'][j], psf_df['Time JD'][j], psf_df['Airmass'][j], psf_df['Seeing'][j]))
            except:
                print('Writeout failed! Inspect quantities you are trying to write out.')
                pdb.set_trace()
            for i in range(len(source_names)):                    
                if i != len(source_names) - 1:
                    format_string = '{:20.11f}, {:26.11f}, '
                else:
                    format_string = '{:20.11f}, {:26.11f}\n'
                
                f.write(format_string.format(psf_df[source_names[i]+' Flux'][j], psf_df[source_names[i]+' Flux Error'][j]))
    print('')    
    return
           
Пример #24
0
def jwst_camera_fpa_data(data_dir,
                         pattern,
                         standardized_data_dir,
                         parameters,
                         overwrite_source_extraction=False):
    """Generate standardized focal plane alignment (fpa) data
       based on JWST camera image.
    """

    save_plot = parameters['save_plot']

    file_list = glob.glob(os.path.join(data_dir, '*{}'.format(pattern)))

    if len(file_list) == 0:
        raise RuntimeError('No data found')

    file_list.sort()
    for f in file_list:

        plt.close('all')

        print()
        print('Data directory: {}'.format(data_dir))
        print('Image being processed: {}'.format(f))

        im = datamodels.open(f)
        if hasattr(im, 'data') is False:
            im.data = fits.getdata(f)
            #im.dq    = np.zeros(im.data.shape)

        header_info = OrderedDict()

        for attribute in 'telescope'.split():
            header_info[attribute] = getattr(im.meta, attribute)

        # observations
        for attribute in 'date time visit_number visit_id visit_group activity_id program_number'.split(
        ):
            header_info['observation_{}'.format(attribute)] = getattr(
                im.meta.observation, attribute)

        header_info['epoch_isot'] = '{}T{}'.format(
            header_info['observation_date'], header_info['observation_time'])

        #  instrument
        for attribute in 'name filter pupil detector'.split():
            header_info['instrument_{}'.format(attribute)] = getattr(
                im.meta.instrument, attribute)

        # subarray
        for attribute in 'name'.split():
            header_info['subarray_{}'.format(attribute)] = getattr(
                im.meta.subarray, attribute)

        # aperture
        for attribute in 'name position_angle pps_name'.split():
            try:
                value = getattr(im.meta.aperture, attribute)
            except AttributeError:
                value = None

            header_info['aperture_{}'.format(attribute)] = value

        header_info['INSTRUME'] = header_info['instrument_name']
        header_info['SIAFAPER'] = header_info['aperture_name']

        instrument_name = getattr(im.meta.instrument, 'name')
        instrument_detector = getattr(im.meta.instrument, 'detector')
        instrument_filter = getattr(im.meta.instrument, 'filter')

        # temporary solution, this should come from populated aperture attributes
        #if header_info['subarray_name'] == 'FULL':
        #    master_apertures = pysiaf.read.read_siaf_detector_layout()
        #    if header_info['instrument_name'].lower() in ['niriss', 'miri']:
        #        header_info['SIAFAPER'] = master_apertures['AperName'][np.where(master_apertures['InstrName']==header_info['instrument_name'])[0][0]]
        #    elif header_info['instrument_name'].lower() in ['fgs']:
        #        header_info['SIAFAPER'] = 'FGS{}_FULL'.format(header_info['instrument_detector'][-1])
        #    elif header_info['instrument_name'].lower() in ['nircam']:
        #        header_info['SIAFAPER'] = header_info['aperture_name']
        #else:
        #    sys.exit('Only FULL arrays are currently supported.')

        # target
        for attribute in 'ra dec catalog_name proposer_name'.split():
            header_info['target_{}'.format(attribute)] = getattr(
                im.meta.target, attribute)

        # pointing
        for attribute in 'ra_v1 dec_v1 pa_v3'.split():
            try:
                value = getattr(im.meta.pointing, attribute)
            except AttributeError:
                value = None
            header_info['pointing_{}'.format(attribute)] = value

        # add HST style keywords
        header_info['PROGRAM_VISIT'] = '{}_{}'.format(
            header_info['observation_program_number'],
            header_info['observation_visit_id'])
        header_info['PROPOSID'] = header_info['observation_program_number']
        header_info['DATE-OBS'] = header_info['observation_date']
        header_info['TELESCOP'] = header_info['telescope']
        header_info['INSTRUME'] = header_info['instrument_name']
        try:
            header_info['APERTURE'] = header_info['SIAFAPER']
        except KeyError:
            header_info['APERTURE'] = None
        header_info['CHIP'] = 0

        # TBD: Need to remove making yet another directory
        #extracted_sources_dir = os.path.join(standardized_data_dir, 'extraction')
        #if os.path.isdir(extracted_sources_dir) is False:
        #    os.makedirs(extracted_sources_dir)
        extracted_sources_file = os.path.join(
            standardized_data_dir,  #extracted_sources_dir,
            '{}_extracted_sources.fits'.format(
                os.path.basename(f).split('.')[0]))

        mask_extreme_slope_values = False
        parameters['maximum_slope_value'] = 1000.

        # Check if extracted_sources_file exists, or overwrite_source_extraction is set to True
        if (not os.path.isfile(extracted_sources_file)) or (
                overwrite_source_extraction):
            data = copy.deepcopy(im.data)
            #dq = copy.deepcopy(im.dq)

            # Convert image data to counts per second
            photmjsr = getattr(im.meta.photometry, 'conversion_megajanskys')
            data_cps = data / photmjsr

            if mask_extreme_slope_values:
                # clean up extreme slope values
                bad_index = np.where(
                    np.abs(data) > parameters['maximum_slope_value'])
                data[bad_index] = 0.
                #dq[bad_index] = -1  # the dq array is commented out above

            bkgrms = MADStdBackgroundRMS()
            mmm_bkg = MMMBackground()
            bgrms = bkgrms(data_cps)
            bgavg = mmm_bkg(data_cps)

            # Default parameters that generally work for NIRCam/NIRISS images
            sigma_factor = 10
            round_lo, round_hi = 0.0, 0.6
            sharp_lo, sharp_hi = 0.3, 1.4
            fwhm_lo, fwhm_hi = 1.0, 20.0
            fwhm = 2.0
            minsep_fwhm = 7  # NOTE: minsep_fwhm>5 to reject artifacts around saturated stars
            flux_percent_lo, flux_percent_hi = 10, 99

            # if 'sharp_lo' in parameters:
            #    sharp_lo = parameters['sharp_lo']

            ###
            ### TBD1: Relocate params below to config parts/files
            ###
            # Use different criteria for selecting good stars
            if parameters['nominalpsf']:
                # If using Nominal PSF models
                if instrument_name == 'NIRISS':
                    #fwhm_lo, fwhm_hi = 1.0, 2.0
                    sharp_lo, sharp_hi = 0.6, 1.4
                elif instrument_name == 'FGS':
                    #fwhm_lo, fwhm_hi = 1.0, 1.4
                    sharp_lo, sharp_hi = 0.6, 1.4
                elif instrument_name == 'NIRCAM':
                    sharp_lo, sharp_hi = 0.6, 1.4
                elif instrument_name == 'MIRI':
                    sharp_lo, sharp_hi = 0.8, 1.0
                    fwhm_lo, fwhm_hi = 1.5, 2.2
                    sigma_factor = 3
                elif instrument_name == 'NIRSPEC':
                    sharp_lo, sharp_hi = 0.6, 0.8
                    round_lo, round_hi = 0.0, 0.3
                    fwhm_lo, fwhm_hi = 1.0, 1.75
            else:
                ###
                ### For OTE commissioning, tweak the params below after finding
                ### the correct ranges by running the photometry notebook.
                ###

                # If using Commissioning (non-phased) PSF models
                if instrument_name == 'NIRISS':
                    sharp_lo, sharp_hi = 0.6, 1.4
                    fwhm_lo, fwhm_hi = 1.4, 2.4

################################################################################
################################################################################
################################################################################

                elif instrument_name == 'FGS':
                    sigma_factor = 10
                    minsep_fwhm = 2.5
                    sharp_lo, sharp_hi = 0.45, 0.7
                    round_lo, round_hi = 0.0, 0.3
                    flux_percent_lo, flux_percent_hi = 2, 99
                    fwhm = 4

################################################################################
################################################################################
################################################################################

# Below works well for F200W and F356W images

                elif instrument_name == 'NIRCAM':
                    sigma_factor = 3
                    minsep_fwhm = 2.5
                    sharp_lo, sharp_hi = 0.5, 0.7
                    round_lo, round_hi = 0.0, 0.2
                    flux_percent_lo, flux_percent_hi = 2, 99
                    if 'F200W' in instrument_filter:
                        fwhm = 10
                    elif 'F356W' in instrument_filter:
                        fwhm = 8
                    elif 'F090W' in instrument_filter:
                        fwhm = 5.5
                    elif 'F277W' in instrument_filter:
                        fwhm = 6.5
                    else:
                        fwhm = 3


################################################################################
################################################################################
################################################################################

                elif instrument_name == 'MIRI':
                    sharp_lo, sharp_hi = 0.5, 1.0
                    fwhm_lo, fwhm_hi = 1.5, 2.2
                    sigma_factor = 3
                elif instrument_name == 'NIRSPEC':
                    sharp_lo, sharp_hi = 0.5, 0.8
                    round_lo, round_hi = 0.0, 0.3
                    fwhm_lo, fwhm_hi = 1.0, 1.75

            # Use IRAFStarFinder for source detection
            iraffind = IRAFStarFinder(threshold=sigma_factor * bgrms + bgavg,
                                      fwhm=fwhm,
                                      minsep_fwhm=minsep_fwhm,
                                      roundlo=round_lo,
                                      roundhi=round_hi,
                                      sharplo=sharp_lo,
                                      sharphi=sharp_hi)

            # Create default mask with all False values
            datamask = np.zeros(
                data_cps.shape,
                dtype=bool)  # This creates an array with all False

            # Mask the left (for NRS1) and right regions (for NRS2) for NIRSpec
            if instrument_detector == 'NRS1':
                datamask[:, :1023] = True  # Mask everything on the left side
            elif instrument_detector == 'NRS2':
                datamask[:, 1024:] = True  # Mask everything on the right side

            iraf_extracted_sources = iraffind(data_cps, mask=datamask)

            # Perform some basic filtering

            # Remove sources based on flux percentile
            # 10-99% works well for filtering out too faint or saturated sources
            flux_min = np.percentile(iraf_extracted_sources['flux'],
                                     flux_percent_lo)
            flux_max = np.percentile(iraf_extracted_sources['flux'],
                                     flux_percent_hi)
            iraf_extracted_sources.remove_rows(
                np.where(iraf_extracted_sources['flux'] < flux_min))
            iraf_extracted_sources.remove_rows(
                np.where(iraf_extracted_sources['flux'] > flux_max))

            # Also remove sources based on fwhm
            ###
            ### Don't use below for now - 2/23/2022 (Don't use it unless we get lots of bad sources)
            ###
            #iraf_extracted_sources.remove_rows(np.where(iraf_extracted_sources['fwhm']<fwhm_lo))
            #iraf_extracted_sources.remove_rows(np.where(iraf_extracted_sources['fwhm']>fwhm_hi))

            # Now improve the positions by re-running centroiding algorithm if necessary.
            # NOTE: For now, re-centroiding will be turned off

            ###
            ### TBD2: Add re-centroiding algorithm adopted from Paul here
            ###
            #xarr = sources_masked['xcentroid']
            #yarr = sources_masked['ycentroid']
            #newx, newy = centroid_sources(data_cps, xarr, yarr, box_size=5, centroid_func=centroid_2dg)
            #coords = np.column_stack((newx, newy))
            #srcaper = CircularAnnulus(coords, r_in=1, r_out=3)
            #srcaper_masks = srcaper.to_mask(method='center')
            #satflag = np.zeros((len(newx),),dtype=int)
            #i = 0
            #for mask in srcaper_masks:
            #    srcaper_dq = mask.multiply(dqarr)
            #    srcaper_dq_1d = srcaper_dq[mask.data>0]
            #    badpix = np.logical_and(srcaper_dq_1d>2, srcaper_dq_1d<7)
            #    reallybad = np.where(srcaper_dq_1d==1)
            #    if ((len(srcaper_dq_1d[badpix]) > 1) or (len(srcaper_dq_1d[reallybad]) > 0)):
            #        satflag[i] = 1
            #        i =+1
            #goodx = newx[np.where(satflag==0)]
            #goody = newy[np.where(satflag==0)]
            #print('Number of sources before removing saturated or bad pixels: ', len(xarr))
            #print('Number of sources without saturated or bad pixels: ', len(goodx))
            #print(' ')
            #coords = np.column_stack((goodx,goody))

            print('Number of extracted sources after filtering: {} sources'.
                  format(len(iraf_extracted_sources)))

            if parameters['use_epsf'] is True:
                size = 25
                hsize = (size - 1) / 2
                x = iraf_extracted_sources['xcentroid']
                y = iraf_extracted_sources['ycentroid']
                mask = ((x > hsize) & (x < (data_cps.shape[1] - 1 - hsize)) &
                        (y > hsize) & (y < (data_cps.shape[0] - 1 - hsize)))
                stars_tbl = Table()
                stars_tbl['x'] = x[mask]
                stars_tbl['y'] = y[mask]
                print('Using {} stars to build epsf'.format(len(stars_tbl)))

                data_cps_bkgsub = data_cps.copy()
                data_cps_bkgsub -= bgavg
                nddata = NDData(data=data_cps_bkgsub)
                stars = extract_stars(nddata, stars_tbl, size=size)

                #
                # Figure - PSF stars
                #
                nrows = 10
                ncols = 10
                fig, ax = plt.subplots(nrows=nrows,
                                       ncols=ncols,
                                       figsize=(20, 20),
                                       squeeze=True)
                ax = ax.ravel()
                for i in range(nrows * ncols):
                    if i <= len(stars) - 1:
                        norm = simple_norm(stars[i], 'log', percent=99.)
                        ax[i].imshow(stars[i],
                                     norm=norm,
                                     origin='lower',
                                     cmap='viridis')
                plt.title('{} sample stars for epsf'.format(
                    header_info['APERTURE']))
                if save_plot:
                    figname = os.path.join(
                        standardized_data_dir, '{}_sample_psfs.pdf'.format(
                            os.path.basename(f).split('.')[0]))
                    plt.savefig(figname)
                if parameters['show_extracted_sources']:
                    plt.show()

                #
                # Timer for ePSF construction
                #
                tic = time.perf_counter()
                epsf_builder = EPSFBuilder(oversampling=4,
                                           maxiters=3,
                                           progress_bar=False)
                print("Building ePSF ...")
                epsf, fitted_stars = epsf_builder(stars)
                toc = time.perf_counter()
                print("Time elapsed for building ePSF:", toc - tic)

                #
                # Figure - ePSF plot
                #
                norm_epsf = simple_norm(epsf.data, 'log', percent=99.)
                plt.figure()
                plt.imshow(epsf.data,
                           norm=norm_epsf,
                           origin='lower',
                           cmap='viridis')
                plt.colorbar()
                plt.title('{} epsf using {} stars'.format(
                    header_info['APERTURE'], len(stars_tbl)))
                if save_plot:
                    figname = os.path.join(
                        standardized_data_dir, '{}_epsf.pdf'.format(
                            os.path.basename(f).split('.')[0]))
                    plt.savefig(figname)
                if parameters['show_extracted_sources']:
                    plt.show()

                daogroup = DAOGroup(5.0 * 2.0)
                psf_model = epsf.copy()

                tic = time.perf_counter()
                photometry = IterativelySubtractedPSFPhotometry(
                    finder=iraffind,
                    group_maker=daogroup,
                    bkg_estimator=mmm_bkg,
                    psf_model=psf_model,
                    fitter=LevMarLSQFitter(),
                    niters=1,
                    fitshape=(11, 11),
                    aperture_radius=5)
                print('Performing source extraction and photometry ...')
                epsf_extracted_sources = photometry(data_cps)
                toc = time.perf_counter()
                print("Time elapsed for PSF photometry:", toc - tic)
                print('Final source extraction with epsf: {} sources'.format(
                    len(epsf_extracted_sources)))

                epsf_extracted_sources['xcentroid'] = epsf_extracted_sources[
                    'x_fit']
                epsf_extracted_sources['ycentroid'] = epsf_extracted_sources[
                    'y_fit']
                extracted_sources = epsf_extracted_sources
                extracted_sources.write(extracted_sources_file, overwrite=True)

                norm = simple_norm(data_cps, 'sqrt', percent=99.)
                diff = photometry.get_residual_image()
                plt.figure()
                ax1 = plt.subplot(1, 2, 1)
                plt.xlabel("X [pix]")
                plt.ylabel("Y [pix]")
                ax1.imshow(data_cps, norm=norm, cmap='Greys')
                ax2 = plt.subplot(1, 2, 2)
                plt.xlabel("X [pix]")
                plt.ylabel("Y [pix]")
                ax2.imshow(diff, norm=norm, cmap='Greys')
                plt.title('PSF subtracted image for {}'.format(
                    os.path.basename(f)))
                if save_plot:
                    figname = os.path.join(
                        standardized_data_dir,
                        '{}_psfsubtracted_image.pdf'.format(
                            os.path.basename(f).split('.')[0]))
                    plt.savefig(figname)
                if parameters['show_psfsubtracted_image']:
                    plt.show()

            else:

                extracted_sources = iraf_extracted_sources
                extracted_sources.write(extracted_sources_file, overwrite=True)

            positions = np.transpose((extracted_sources['xcentroid'],
                                      extracted_sources['ycentroid']))
            apertures = CircularAperture(positions, r=10)
            norm = simple_norm(data_cps, 'sqrt', percent=99.)

            plt.figure(figsize=(12, 12))
            plt.xlabel("X [pix]")
            plt.ylabel("Y [pix]")
            plt.imshow(data_cps, norm=norm, cmap='Greys', origin='lower')
            apertures.plot(color='blue', lw=1.5, alpha=0.5)
            title_string = '{}: {} selected sources'.format(
                os.path.basename(f), len(extracted_sources))
            plt.title(title_string)
            plt.tight_layout()
            if save_plot:
                figname = os.path.join(
                    standardized_data_dir, '{}_extracted_sources.pdf'.format(
                        os.path.basename(f).split('.')[0]))
                plt.savefig(figname)
            if parameters['show_extracted_sources']:
                plt.show()
            plt.close()

        else:
            extracted_sources = Table.read(extracted_sources_file)

        print('Extracted {} sources from {}'.format(len(extracted_sources), f))
        impose_positive_flux = True
        if impose_positive_flux and parameters['use_epsf']:
            extracted_sources.remove_rows(
                np.where(extracted_sources['flux_fit'] < 0)[0])
            print('Only {} sources have positive flux'.format(
                len(extracted_sources)))

        astrometry_uncertainty_mas = 5

        if len(extracted_sources) > 0:
            # Cal images are in DMS coordinates which correspond to the SIAF Science (SCI) frame
            extracted_sources['x_SCI'], extracted_sources[
                'y_SCI'] = extracted_sources['xcentroid'], extracted_sources[
                    'ycentroid']

            # For now, astrometric uncertainty defaults to 5 mas for each source.
            extracted_sources['sigma_x_mas'] = np.ones(
                len(extracted_sources)) * astrometry_uncertainty_mas
            extracted_sources['sigma_y_mas'] = np.ones(
                len(extracted_sources)) * astrometry_uncertainty_mas

        # transfer info to astropy table header
        for key, value in header_info.items():
            extracted_sources.meta[key] = value

        extracted_sources.meta['DATAFILE'] = os.path.basename(f)
        extracted_sources.meta['DATAPATH'] = os.path.dirname(f)
        extracted_sources.meta['EPOCH'] = header_info['epoch_isot']

        out_file = os.path.join(
            standardized_data_dir, '{}_FPA_data.fits'.format(
                extracted_sources.meta['DATAFILE'].split('.')[0]))

        print('Writing {}'.format(out_file))
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', AstropyWarning, append=True)
            extracted_sources.write(out_file, overwrite=True)

    return im
Пример #25
0
bkgrms = MADStdBackgroundRMS()
std = bkgrms(imageSumBV)

# Set the finder parameters used to detect the initial stars in the image
iraffind = IRAFStarFinder(threshold=10 * std,
                          fwhm=fwhm,
                          minsep_fwhm=0.01, roundhi=1.0, roundlo=-1.0,
                          sharplo=0.1, sharphi=0.8)

# Define the star-grouping criterion
daogroup = DAOGroup(2.0 * fwhm)

# Define the sky background estimator and the fitting procedure
mmm_bkg = MMMBackground()
#fitter = LevMarLSQFitter()
fitter = SLSQPLSQFitter()
psf_model = IntegratedGaussianPRF(sigma=fwhm / 2.35)

# Put everything into a black box that performs the subtractive iterations
photometry = IterativelySubtractedPSFPhotometry(finder=iraffind,
                                                group_maker=daogroup,
                                                bkg_estimator=mmm_bkg,
                                                psf_model=psf_model,
                                                fitter=LevMarLSQFitter(),
                                                niters=1, fitshape=(fshape, fshape))

# Run everything and extract the results!
result_tabBV = photometry(image=imageSumBV)
residual_imageBV = photometry.get_residual_image()
Пример #26
0
bkgrms = MADStdBackgroundRMS()
std = bkgrms(imageSumBV)

# Set the finder parameters used to detect the initial stars in the image
iraffind = IRAFStarFinder(threshold=10 * std,
                          fwhm=fwhm,
                          minsep_fwhm=0.01, roundhi=1.0, roundlo=-1.0,
                          sharplo=0.1, sharphi=0.8)

# Define the star-grouping criterion
daogroup = DAOGroup(2.0 * fwhm)

# Define the sky background estimator and the fitting procedure
mmm_bkg = MMMBackground()
fitter = LevMarLSQFitter()
psf_model = IntegratedGaussianPRF(sigma=fwhm / 2.35)

# Put everything into a black box that performs the subtractive iterations
photometry = IterativelySubtractedPSFPhotometry(finder=iraffind,
                                                group_maker=daogroup,
                                                bkg_estimator=mmm_bkg,
                                                psf_model=psf_model,
                                                fitter=LevMarLSQFitter(),
                                                niters=1, fitshape=(2 * fwhm - 1, 2 * fwhm - 1))

# Run everything and extract the results!
result_tabBV = photometry(image=imageSumBV)
residual_imageBV = photometry.get_residual_image()
#result_tabBV.write('result_tabBV.2.5s.dat', format='ascii')
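# Note: photutils requires fitshape to have odd, positive values, so the expression
# above assumes an integer fwhm (e.g. fwhm = 5 gives fitshape = (9, 9)).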
Пример #27
0
        def SubmitEvent(self):

            #Not a fan of globals but this is the easiest way to grab the file location
            global fileLocation
            #sigma_psf = 2.88
            #Grab the Sigma from the Entry box in the GUI
            SigmaPSF = SigmaPSFentry.get()
            #Turn the string into a float
            sigma_psf = float(SigmaPSF)
            #Grab the number of iterations from Entry box in GUI
            N_iters1 = nitersEntry.get()
            #Turn the string into a float
            N_iters = float(N_iters1)
            #Test cases to make sure that information was flowing from the GUI to the program
            #print(SigmaPSF)
            #print(N_iters)

            #Open the file as a fits (allows us to handle it) then turn that into readable data.
            with fits.open(fileLocation) as hdul:
                image = hdul[0].data

            #automatically gathered information needed to run the Star Finder
            bkgrms = MADStdBackgroundRMS()
            std = bkgrms(image)

            #Find the stars
            iraffind = IRAFStarFinder(threshold=3.5 * std,
                                      fwhm=sigma_psf * gaussian_sigma_to_fwhm,
                                      minsep_fwhm=0.01,
                                      roundhi=5.0,
                                      roundlo=-5.0,
                                      sharplo=0.0,
                                      sharphi=2.0)
            #Group the stars
            daogroup = DAOGroup(2.0 * sigma_psf * gaussian_sigma_to_fwhm)

            #More automatically gathered info needed for IS-PSFPhotometry to take places
            mmm_bkg = MMMBackground()
            fitter = LevMarLSQFitter()
            #Grabbed from the user input
            psf_model = IntegratedGaussianPRF(sigma=sigma_psf)
            #Run IS-PSFPhotometry
            photometry = IterativelySubtractedPSFPhotometry(
                finder=iraffind,
                group_maker=daogroup,
                bkg_estimator=mmm_bkg,
                psf_model=psf_model,
                fitter=LevMarLSQFitter(),
                niters=N_iters,
                fitshape=(11, 11))
            #Do photometry on the image
            result_tab = photometry(image=image)
            #grab the resiudal image
            residual_image = photometry.get_residual_image()

            #Reuse the photometry results computed above and print the aspects we want.
            phot_results = result_tab
            with open("output.txt", "w") as text_file:
                print(phot_results['x_fit', 'y_fit', 'flux_fit'],
                      file=text_file)
            print(phot_results['x_fit', 'y_fit', 'flux_fit'])
            print("Sum of pixels: {}".format(sum(sum(residual_image))))
            #Plot images made#
            #Start by creating plots.
            plt.subplot(1, 5, 1)
            #Show the first plot (which is just the raw image)
            plt.imshow(image,
                       cmap='viridis',
                       aspect=1,
                       interpolation='nearest',
                       origin='lower')
            plt.title('Raw')
            plt.colorbar(orientation='horizontal', fraction=0.046, pad=0.04)
            #Create the second plot
            plt.subplot(1, 5, 2)
            #Show the residual_image
            plt.imshow(residual_image,
                       cmap='viridis',
                       aspect=1,
                       interpolation='nearest',
                       origin='lower')
            plt.title('PSF')
            plt.colorbar(orientation='horizontal', fraction=0.046, pad=0.04)
            #Draw in the sum of pixels.
            plt.text(0,
                     65,
                     "Sum of pixels: {}".format(sum(sum(residual_image))),
                     fontsize=7)
            #Create the third plot which is the subtracted images combined.
            sb = image - residual_image
            plt.subplot(1, 5, 3)
            plt.imshow(sb,
                       cmap='viridis',
                       aspect=1,
                       interpolation='nearest',
                       origin='lower')
            plt.title('PSF-S')
            plt.colorbar(orientation='horizontal', fraction=0.046, pad=0.04)

            with open("AP_RI.txt", "w") as f:
                for _ in range(len(residual_image)):
                    f.write(str(residual_image[_]))
            with open("AP_BS.txt", "w") as f:
                for _ in range(len(sb)):
                    f.write(str(sb[_]))

            print("Starting creation of CSV")
            subprocess.run(['py', 'create_CSV.py'], shell=False)

            print("Starting creation of Stats")
            subprocess.run(['py', 'create_info.py'], shell=False)

            print("Starting Threshold")
            subprocess.run(['py', 'threshold.py'], shell=False)

            with open("APC_Res.csv", "r") as f:
                APC_Res = f.read()
            APC_Res = APC_Res.split(",")
            APC_Res = [float(i) for i in APC_Res]

            #Every sqrt(number of pixels) datapoints start a new row, turning the flat list into a 2D array.
            #Corrected_Res is the main array; temp_list holds the current row and is appended to
            #Corrected_Res (then reset) every sqrt(pixels) entries, until the full pixel count is met.
            #An internal counter tracks the position within the current row and resets every sqrt(pixels).
            temp_list = np.array([])
            SqrPixels = math.sqrt(len(APC_Res))
            internal_counter = 0
            #print(SqrPixels)
            #print(len(APC_Res))
            Corrected_Res = np.array([[]])

            for _ in range(len(APC_Res)):
                if internal_counter <= SqrPixels - 2:
                    try:
                        temp_list = np.append(temp_list, APC_Res[_ - 1])
                        #print(_)
                        if _ + 1 == (int(SqrPixels) * int(SqrPixels)):
                            Corrected_Res = np.append(Corrected_Res, temp_list)
                    except:
                        print("Not right 2.0")
                    internal_counter = internal_counter + 1
                else:
                    internal_counter = 0
                    #print(temp_list)
                    Corrected_Res = np.append(Corrected_Res, temp_list)
                    temp_list = []
                    temp_list = np.append(temp_list, APC_Res[_ - 1])
                    #print("Resetting Counter & List {}".format(_))
                    if _ + 1 == (int(SqrPixels) * int(SqrPixels)):
                        Corrected_Res = np.append(Corrected_Res, temp_list)
                        #print(_+1)
                    #print("Iteration {}".format(_))
            #print(residual_image)
            #print("\n")
            #print(Corrected_Res)
            Corrected_Res = np.reshape(Corrected_Res,
                                       (int(SqrPixels), int(SqrPixels)))
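            #Assuming APC_Res has a perfect-square length, the loop above is essentially
            #equivalent to np.array(APC_Res).reshape(int(SqrPixels), int(SqrPixels)),
            #modulo the off-by-one indexing of APC_Res[_ - 1].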

            Correct_BS = image - Corrected_Res
            plt.subplot(1, 5, 4)
            plt.imshow(Corrected_Res,
                       cmap='viridis',
                       aspect=1,
                       interpolation='nearest',
                       origin='lower')
            plt.title('CPSF')
            plt.colorbar(orientation='horizontal', fraction=0.046, pad=0.04)

            plt.subplot(1, 5, 5)
            plt.imshow(Correct_BS,
                       cmap='viridis',
                       aspect=1,
                       interpolation='nearest',
                       origin='lower')
            plt.title('CPSF-S')
            plt.colorbar(orientation='horizontal', fraction=0.046, pad=0.04)

            #Number of bins
            n_bins = 20
            #This subplot layout took some tinkering (and a number of documentation examples) to get working.
            fig, axs = plt.subplots(1, 2)

            # We can set the number of bins with the `bins` kwarg
            axs[0].hist(residual_image, bins=n_bins)
            plt.title('Residual Image Hist')
            axs[1].hist(sb, bins=n_bins)
            plt.title('Background Subtracted Hist')
            #plt.colorbar(orientation='horizontal', fraction=0.046, pad=0.04)
            #All Pixels from residual image

            fig = plt.figure()
            ax = fig.add_subplot(111, projection='3d')
            delta = (6 * (1 / len(sb)))

            nx = ny = np.arange(-3.0, 3.0, delta)
            X, Y = np.meshgrid(nx, ny)
            #print(X)
            #print(Y)
            x, y, z = X * len(sb), Y * len(sb), sb
            ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap='viridis')

            figi = plt.figure()
            axi = figi.add_subplot(111, projection='3d')
            deltai = (6 * (1 / len(sb)))

            nxi = nyi = np.arange(-3.0, 3.0, deltai)
            Xi, Yi = np.meshgrid(nxi, nyi)
            #print(X)
            #print(Y)
            xi, yi, zi = Xi * len(Correct_BS), Yi * len(Correct_BS), Correct_BS
            axi.plot_surface(xi, yi, zi, rstride=1, cstride=1, cmap='viridis')

            plt.show()