Example No. 1
def measure_one_annular_bg(image, center, innerRad, outerRad, metric, apMethod='exact'):
    """Class methods are similar to regular functions.

    Note:
        Do not include the `self` parameter in the ``Args`` section.

    Args:
        param1: The first parameter.
        param2: The second parameter.

    Returns:
        True if successful, False otherwise.

    """
    
    innerAperture   = CircularAperture(center, innerRad)
    outerAperture   = CircularAperture(center, outerRad)
    
    inner_aper_mask = innerAperture.to_mask(method=apMethod)[0]
    inner_aper_mask = inner_aper_mask.to_image(image.shape).astype(bool)
    
    outer_aper_mask = outerAperture.to_mask(method=apMethod)[0]
    outer_aper_mask = outer_aper_mask.to_image(image.shape).astype(bool)
    
    backgroundMask = (~inner_aper_mask)*outer_aper_mask
    
    return metric(image[backgroundMask])
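
A minimal usage sketch for the function above. It assumes photutils and numpy are installed; the synthetic frame, center, and radii are illustrative only. The center is wrapped in a one-element list so the `[0]` indexing used above works on both older photutils (which returned a list from `to_mask`) and current releases (which return a list only for multiple positions).

import numpy as np
# assumes measure_one_annular_bg (and its CircularAperture import) is defined as above

rng = np.random.default_rng(0)
frame = rng.normal(loc=100.0, scale=5.0, size=(64, 64))  # flat 100-count background plus noise

# Median background in an annulus between 8 and 16 pixels from the frame center
bg = measure_one_annular_bg(frame, center=[(32.0, 32.0)], innerRad=8.0,
                            outerRad=16.0, metric=np.median, apMethod='exact')
print(bg)  # expected to be close to 100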
Example No. 2
    def custom_aperture(self,
                        shape=None,
                        r=0.0,
                        l=0.0,
                        w=0.0,
                        theta=0.0,
                        pos=None,
                        method='exact'):
        """
        Creates a custom circular or rectangular aperture of arbitrary size. 
        
        Parameters
        ----------       
        shape: str, optional
            The shape of the aperture to be used. Must be either `circle` or `rectangle`.
        r: float, optional
            If shape is `circle` the radius of the circular aperture to be used.
        l: float, optional
            If shape is `rectangle` the length of the rectangular aperture to be used.
        w: float, optional
            If shape is `rectangle` the width of the rectangular aperture to be used.
        theta: float, optional
            If shape is `rectangle` the rotation of the rectangle relative to detector coordinates.
            Uses units of radians.   
        pos: tuple, optional
            The center of the aperture, in TPF coordinates. If not set, defaults to the center of the TPF.
        method: str, optional
            The photutils aperture-mask method to be used, either `exact`, `center`, or `subpixel`.
            Passed through to photutils and used as intended by that package.
        """
        if shape is None:
            print("Please select a shape: circle or rectangle")
            return

        shape = shape.lower()
        if pos is None:
            pos = (self.tpf.shape[1] / 2, self.tpf.shape[2] / 2)

        if shape == 'circle':
            if r == 0.0:
                print("Please set a radius (in pixels) for your aperture")
            else:
                aperture = CircularAperture(pos, r=r)
                self.aperture = aperture.to_mask(method=method)[0].to_image(
                    shape=((np.shape(self.tpf[0]))))

        elif shape == 'rectangle':
            if l == 0.0 or w == 0.0:
                print(
                    "For a rectangular aperture, please set both length and width: custom_aperture(shape='rectangle', l=#, w=#)"
                )
            else:
                aperture = RectangularAperture(pos, w=w, h=l, theta=theta)  # photutils expects w, h, theta
                self.aperture = aperture.to_mask(method=method)[0].to_image(
                    shape=((np.shape(self.tpf[0]))))
        else:
            print(
                "Aperture shape not recognized. Please set shape == 'circle' or 'rectangle'"
            )
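
The mask-building pattern at the heart of the method above, reduced to a standalone sketch. It assumes only numpy and photutils; the isinstance check papers over the photutils API change in which to_mask stopped returning a list for a single position, and the 13x13 frame is an illustrative stand-in for one TPF cadence.

import numpy as np
from photutils import CircularAperture  # photutils.aperture in newer releases

frame = np.zeros((13, 13))                           # stand-in for one TPF cadence
aperture = CircularAperture((6.0, 6.0), r=2.5)
mask = aperture.to_mask(method='exact')
mask = mask[0] if isinstance(mask, list) else mask   # older photutils returned a list
aperture_image = mask.to_image(frame.shape)          # 2D weight image, same shape as the frame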
Example No. 3
def measure_one_background(image, center, aperRad, metric, apMethod='exact', bgMethod='circle'):
    """Class methods are similar to regular functions.

    Note:
        Do not include the `self` parameter in the ``Args`` section.

    Args:
        param1: The first parameter.
        param2: The second parameter.

    Returns:
        True if successful, False otherwise.

    """
    
    if np.ndim(aperRad) == 0:
        aperture  = CircularAperture(center, aperRad)
        aperture  = aperture.to_mask(method=apMethod)[0]    # list of ApertureMask objects (one for each position)
        aperture  = ~aperture.to_image(image.shape).astype(bool) # invert to keep pixels 'outside' the aperture
    else:
        innerRad, outerRad = aperRad
        
        innerAperture   = CircularAperture(center, innerRad)
        outerAperture   = CircularAperture(center, outerRad)
        
        inner_aper_mask = innerAperture.to_mask(method=apMethod)[0]
        inner_aper_mask = inner_aper_mask.to_image(image.shape).astype(bool)
    
        outer_aper_mask = outerAperture.to_mask(method=apMethod)[0]
        outer_aper_mask = outer_aper_mask.to_image(image.shape).astype(bool)     
        
        aperture        = (~inner_aper_mask)*outer_aper_mask
    
    if bgMethod == 'median':
        medFrame  = median(image[aperture])
        madFrame  = scale.mad(image[aperture])
        
        medianMask= abs(image - medFrame) < nSig*madFrame
        
        aperture  = medianMask*aperture
    
    if bgMethod == 'kde':
        kdeFrame = kde.KDEUnivariate(image[aperture].ravel())
        kdeFrame.fit()
        
        return kdeFrame.support[kdeFrame.density.argmax()]
    
    return metric(image[aperture])
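
A usage sketch exercising the two-radius branch of the function above; the frame and radii are synthetic, and the center is wrapped in a one-element list so the `[0]` indexing works across photutils versions. The 'median' and 'kde' branches additionally rely on names (median, scale, kde, nSig) defined at module level in the original source, so only the default behaviour is shown.

import numpy as np
# assumes measure_one_background is defined as above

rng = np.random.default_rng(1)
frame = rng.normal(loc=250.0, scale=3.0, size=(64, 64))

# Background from an annulus between 8 and 16 pixels, default bgMethod='circle'
bg = measure_one_background(frame, center=[(32.0, 32.0)], aperRad=(8.0, 16.0),
                            metric=np.median, apMethod='exact')
print(bg)  # expected to be close to 250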
Example No. 4
def centralizar(img, banda, cx, cy):
    '''
    Finds a source inside an aperture with the radius defined by the user
    '''

    xmax, ymax = img.shape
    dmin = np.sqrt(xmax*xmax+ymax*ymax)

    r = session['r']
    aperture = CircularAperture((cx,cy), r)
    mask = aperture.to_mask()
    print(mask)
    _, median, std = session['stats'][banda]
    find = DAOStarFinder(fwhm=3, threshold=3*std)
    cr = find((img-median)*mask.to_image(img.shape))
    if cr:
        print(cr)
        # Pick the detection closest to the clicked point
        for i in range(len(cr)):
            x = cr[i]['xcentroid']
            y = cr[i]['ycentroid']
            d = np.sqrt((x-cx)**2+(y-cy)**2)
            if d<dmin:
                dmin = d
                cx = cr[i]['xcentroid']
                cy = cr[i]['ycentroid']

    return cx,cy
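
The function above depends on a Flask-style session and precomputed statistics, so it cannot run on its own. The sketch below shows the underlying idea with synthetic data: run DAOStarFinder inside a circular mask and keep the detection closest to a clicked point. The star position, radius, and thresholds are illustrative, not taken from the original source.

import numpy as np
from astropy.stats import sigma_clipped_stats
from photutils import CircularAperture, DAOStarFinder  # split across photutils.aperture / .detection in newer releases

# Synthetic frame with one Gaussian star near (30.5, 33.2)
yy, xx = np.mgrid[:64, :64]
img = 50.0 * np.exp(-((xx - 30.5) ** 2 + (yy - 33.2) ** 2) / (2 * 2.0 ** 2))
img += np.random.default_rng(2).normal(0.0, 1.0, img.shape)

cx, cy, r = 30, 33, 10                       # "clicked" position and search radius
_, median, std = sigma_clipped_stats(img, sigma=3.0)

aperture = CircularAperture((cx, cy), r)
mask = aperture.to_mask(method='center')
mask = mask[0] if isinstance(mask, list) else mask   # older photutils returned a list

sources = DAOStarFinder(fwhm=3.0, threshold=3.0 * std)((img - median) * mask.to_image(img.shape))
if sources is not None:
    d = np.hypot(sources['xcentroid'] - cx, sources['ycentroid'] - cy)
    best = sources[int(np.argmin(d))]
    cx, cy = best['xcentroid'], best['ycentroid']   # refined source position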
Example No. 5
    def _calculate_effective_fwhm(self):
        """Calculates the table to convert between entropy and effective PSF FWHM"""
        def entropy(a):
            a = a - a.min() + 1e-10
            a = a / a.sum()
            return -(a * log(a)).sum()

        fwhms = exp(linspace(log(1), log(50), 100))
        self._entropy_table = xa.DataArray(zeros((self.napt, 100)), name='entropy', dims='aperture fwhm'.split(),
                                           coords={'aperture': self._ds.aperture, 'fwhm': fwhms})

        for i, ar in enumerate(array(self._ds.aperture)):
            apt = CircularAperture((0, 0), ar)
            msk = apt.to_mask()[0]
            mb = msk.data.astype('bool')
            x, y = meshgrid(arange(mb.shape[0]), arange(mb.shape[1]))
            r = sqrt((x - ar)**2 + (y - ar)**2)
            sigmas = fwhms / 2.355
            self._entropy_table[i, :] = [entropy(norm.pdf(r, 0, sigma)[mb]) for sigma in sigmas]

        self._fwhm = self._ds.obj_entropy.copy()
        self._fwhm.name = 'fwhm'

        for aperture in self._fwhm.aperture:
            ip = interp1d(self._entropy_table.loc[aperture, :], self._entropy_table.fwhm, bounds_error=False,
                          fill_value=tuple(self._entropy_table.loc[aperture][[0, -1]]))
            for star in self._fwhm.star:
                m = self._fwhm.loc[:, star, aperture].notnull()
                self._fwhm.loc[m, star, aperture] = ip(self._ds.obj_entropy.loc[m, star, aperture])
Example No. 6
def measure_one_median_bg(image, center, aperRad, metric, nSig, apMethod='exact'):
    """Class methods are similar to regular functions.

    Note:
        Do not include the `self` parameter in the ``Args`` section.

    Args:
        param1: The first parameter.
        param2: The second parameter.

    Returns:
        True if successful, False otherwise.

    """
    
    aperture       = CircularAperture(center, aperRad)
    aperture       = aperture.to_mask(method=apMethod)[0]
    aperture       = aperture.to_image(image.shape).astype(bool)
    backgroundMask = ~aperture
    
    medFrame  = median(image[backgroundMask])
    madFrame  = std(image[backgroundMask])
    
    medianMask= abs(image - medFrame) < nSig*madFrame
    
    maskComb  = medianMask*backgroundMask
    
    return median(image[maskComb])
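
A minimal call sketch for the sigma-clipped background estimator above. It assumes numpy's median and std are imported at module level in the original source (the bare median/std names used inside the function), and wraps the center in a one-element list so the `[0]` indexing works across photutils versions.

import numpy as np
# assumes measure_one_median_bg is defined as above

rng = np.random.default_rng(3)
frame = rng.normal(loc=180.0, scale=4.0, size=(64, 64))

bg = measure_one_median_bg(frame, center=[(32.0, 32.0)], aperRad=10.0,
                           metric=np.median, nSig=5.0, apMethod='exact')
print(bg)  # sigma-clipped median of the pixels outside the 10-pixel aperture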
Example No. 7
 def plot_aperture_masks(self,
                         radius=None,
                         minp=0.0,
                         maxp=99.9,
                         cols=5,
                         figsize=(11, 2.5)):
     if radius is not None:
         aps = CircularAperture(
             [self._stars.xcentroid, self._stars.ycentroid], radius)
     else:
         aps = self._aps
     fig, axs = subplots(int(ceil(self.nstars / cols)),
                         cols,
                         figsize=figsize,
                         sharex=True,
                         sharey=True)
     for m, ax in zip(aps.to_mask(), axs.flat):
         #d = m.multiply(self.reduced)
         d = where(m.data.astype('bool'), m.cutout(self.reduced), nan)
         ax.imshow(d,
                   cmap=cm.gray_r,
                   origin='image',
                   norm=sn(d,
                           stretch='log',
                           min_percent=minp,
                           max_percent=maxp))
     fig.tight_layout()
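
The cutout-and-mask step from the loop above, shown standalone. It assumes a plain 2D numpy image in place of self.reduced; for a list of positions, to_mask() returns one ApertureMask per position in every photutils version.

import numpy as np
from photutils import CircularAperture  # photutils.aperture in newer releases

image = np.random.default_rng(4).normal(100.0, 5.0, (64, 64))
aps = CircularAperture([(20.0, 20.0), (40.0, 44.0)], r=6.0)

cutouts = []
for m in aps.to_mask():
    # keep in-aperture pixels; set the rest of the bounding-box cutout to NaN
    cutouts.append(np.where(m.data.astype(bool), m.cutout(image), np.nan))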
Example No. 8
def detect_sources_daofind(fx, fy, photons, threshold):
    mask_radius = 1700
    kernel = Gaussian2DKernel(x_stddev=4 * gaussian_fwhm_to_sigma)

    aperture = CircularAperture((2400, 2400), r=mask_radius)
    mask = aperture.to_mask(method='center')
    mask = mask.to_image(shape=((4800, 4800)))

    weights = photons / framecount_per_sec
    bins = np.arange(0, 4801)
    ndarray, yedges, xedges = np.histogram2d(fy,
                                             fx,
                                             bins=(bins, bins),
                                             weights=weights)
    data = ndarray * mask

    mean, _, std = sigma_clipped_stats(data, sigma=5., maxiters=1)
    # to avoid std becoming zero.
    if std == 0:
        mean = np.mean(data)
        std = np.std(data)

    data = convolve(data, kernel)
    daofind = DAOStarFinder(fwhm=3.0,
                            threshold=threshold * std,
                            exclude_border=True)
    sources = daofind(data - mean)
    sources.sort('mag')

    uA = np.array([sources['xcentroid'].data, sources['ycentroid'].data]).T
    uA = np.round(uA, 2)
    return uA
Example No. 9
def phot(data,xc,yc,r=5,dr=5):

    if dr>0:
        bgflux = skybg_phot(data,xc,yc,r,dr)
    else:
        bgflux = 0
    positions = [(xc, yc)]
    data = data-bgflux
    data[data<0] = 0 

    apertures = CircularAperture(positions, r=r)
    phot_table = aperture_photometry(data, apertures, method='exact')
    aper_mask = apertures.to_mask()[0]
    subdata = data[
        aper_mask.bbox.iymin:aper_mask.bbox.iymax,
        aper_mask.bbox.ixmin:aper_mask.bbox.ixmax
    ] * aper_mask.data

    npp = subdata.sum()**2 / np.sum(subdata**2)
    return float(phot_table['aperture_sum']), bgflux, npp
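
A usage sketch for phot with background subtraction disabled (dr=0), so the skybg_phot helper, which is not defined in this snippet, is never reached. The Gaussian source is synthetic; because positions is a one-element list, the to_mask()[0] indexing above works on both old and new photutils.

import numpy as np
# assumes phot (and its photutils imports) is defined as above

yy, xx = np.mgrid[:64, :64]
data = 500.0 * np.exp(-((xx - 32.0) ** 2 + (yy - 32.0) ** 2) / (2 * 1.5 ** 2))

flux, bg, npp = phot(data.copy(), xc=32.0, yc=32.0, r=5, dr=0)
print(flux, bg, npp)  # total aperture flux, background (0 here), effective number of pixels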
Example No. 10
def circle_mask(position_centre, radius, mask_image, image_input):   
    aperture = CircularAperture(position_centre, r = radius)  #creates an aperture about the input pixel location. 
    masks = aperture.to_mask(method='center')  #creates mask object
    
    mask = masks
    big_circle = mask.to_image(shape = image_input.shape)  #creates mask matrix of same shape as input image

    central_circle_mask = 1 - big_circle  #invert matrix
    
    mask_image_ones = mask_image*central_circle_mask  #add mask to global mask
    return mask_image_ones
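
A short usage sketch: carve a circular hole out of an all-ones global mask. It assumes a photutils version in which to_mask(method='center') returns a single ApertureMask for one position, which is what the code above expects.

import numpy as np
# assumes circle_mask (and its CircularAperture import) is defined as above

image = np.zeros((128, 128))          # any frame; only its shape is used
global_mask = np.ones_like(image)     # 1 = usable pixel, 0 = masked

global_mask = circle_mask((64.0, 64.0), 10.0, global_mask, image)
# pixels inside the 10-pixel circle are now zeroed in global_mask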
Example No. 11
def circle_mask(position_centre, radius, mask_image, image_input):
    aperture = CircularAperture(position_centre, r=radius)
    masks = aperture.to_mask(method='center')

    mask = masks
    big_circle = mask.to_image(shape=image_input.shape)

    central_circle_mask = 1 - big_circle  #invert matrix

    mask_image_ones = mask_image * central_circle_mask  #add mask to global mask
    return mask_image_ones
Example No. 12
 def centroid_single(self, star, r=20, pmin=80, pmax=95, niter=1):
     #if any(cpix < 0.) or any(cpix > im._data.shape[0]):
     #    self.centroid = [nan, nan]
     #    raise ValueError("Star outside the image FOV.")
     apt = CircularAperture(self._cur_centroids_pix[star, :], r)
     reduced_frame = self.reduced
     for iiter in range(niter):
         mask = apt.to_mask()[0]
         cutout = mask.cutout(reduced_frame).copy()
         p = percentile(cutout, [pmin, pmax])
         clipped_cutout = clip(cutout, *p) - p[0]
         c = com(clipped_cutout)
         apt.positions[:] = flip(c, 0) + array(
             [mask.bbox.slices[1].start, mask.bbox.slices[0].start])
     self._cur_centroids_pix[star, :] = apt.positions
     self._update_apertures(self._cur_centroids_pix)
Example No. 13
def aperture(X, Y, N, R):
    """
    This function calculates a simple aperture function that is 1 within
    a circle of radius R, takes an intermediate value between 0
    and 1 at the edge, and is 0 otherwise. The edge values are calculated
    according to the fraction of each pixel's area that lies inside the
    physical aperture.
    http://photutils.readthedocs.io/en/stable/aperture.html
    Input:
        X, Y: meshgrid with the coordinates of the detector ('sampling.py');
            not used by this implementation
        N: linear size (in pixels) of the square output array, centered at (N/2, N/2)
        R: radius (in pixel units) of the mask
    Output:
        A: 2D array with 1s inside the circle, 0s outside, and fractional edge values
    """
    A = CircularAperture((N / 2, N / 2),
                         r=R)  #Circular mask (1s in and 0s out)
    A = A.to_mask(method='exact')  #Mask with exact value in edge pixels
    A = A.to_image(shape=(N, N))  #Conversion from mask to image
    return A
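
A quick call sketch for the function above. X and Y are accepted for interface compatibility but not used internally (only N and R matter), and the code requires a photutils version in which to_mask on a single position returns an ApertureMask directly.

import numpy as np
# assumes aperture (and its CircularAperture import) is defined as above

N, R = 64, 12.0
x = np.arange(N)
X, Y = np.meshgrid(x, x)   # placeholders; not used by this implementation

A = aperture(X, Y, N, R)   # 1 inside the circle, fractional on the edge, 0 outside
print(A.shape, A.max(), A.min())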
def getAperturePhotometry(img, starX, starY):
    stars = [(starX[i], starY[i]) for i in range(starX.shape[0])]
    aperture = CAp(stars, r=8)

    aperture_masks = aperture.to_mask()
    star_apertures = [
        aperture_masks[i].multiply(img) for i in range(0, len(aperture_masks))
    ]

    #annulus_masks = annulus.to_mask(method = 'center')
    #star_annulus = [annulus_masks[i].multiply(data2) for i in range(0,len(annulus_masks))]

    #for i in range(0,len(star_apertures)):
    #    plt.imshow(star_apertures[i])
    #    plt.show()

    phot_table = ap(img, aperture)

    return phot_table["aperture_sum"]
Example No. 15
def measure_one_circle_bg(image, center, aperRad, metric, apMethod='exact'):
    """Class methods are similar to regular functions.

    Note:
        Do not include the `self` parameter in the ``Args`` section.

    Args:
        param1: The first parameter.
        param2: The second parameter.

    Returns:
        True if successful, False otherwise.

    """
    
    aperture  = CircularAperture(center, aperRad)
    aper_mask = aperture.to_mask(method=apMethod)[0]    # list of ApertureMask objects (one for each position)
    
    # backgroundMask = abs(aperture.get_fractions(np.ones(self.imageCube[0].shape))-1)
    backgroundMask = aper_mask.to_image(image.shape).astype(bool)
    backgroundMask = ~backgroundMask  # keep only the pixels outside the aperture
    
    return metric(image[backgroundMask])
Example No. 16
def measure_one_KDE_bg(image, center, aperRad, metric, apMethod='exact'):
    """Class methods are similar to regular functions.

    Note:
        Do not include the `self` parameter in the ``Args`` section.

    Args:
        param1: The first parameter.
        param2: The second parameter.

    Returns:
        True if successful, False otherwise.

    """
    
    aperture       = CircularAperture(center, aperRad)
    aperture       = aperture.to_mask(method=apMethod)[0]
    aperture       = aperture.to_image(image.shape).astype(bool)
    backgroundMask = ~aperture
    
    kdeFrame = kde.KDEUnivariate(image[backgroundMask])
    kdeFrame.fit()
    
    return kdeFrame.support[kdeFrame.density.argmax()]
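
The KDE step used above, shown standalone with statsmodels; the sample is synthetic, and the mode of the density should land near the true background level of 100.

import numpy as np
from statsmodels.nonparametric import kde

background_pixels = np.random.default_rng(6).normal(100.0, 5.0, 2000)

kdeFrame = kde.KDEUnivariate(background_pixels)
kdeFrame.fit()
mode = kdeFrame.support[kdeFrame.density.argmax()]  # peak of the estimated density
print(mode)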
Example No. 17
    #    print(r_inner_as,r_outer_as)

    #   sys.exit(0)

    #    time.sleep(1)
    #    print(WCS.world_axis_physical_types)
    #    aperture_pix=aperture.to_pixel(wcs)
    #    print(aperture_pix)
    #    positions_pix=aperture_pix.positions
    #    r_pix=aperture_pix.r
    #    print('r_pix =',r_pix)
    positions_pix = (ra_pix, dec_pix)
    positions_pix = np.transpose(positions_pix)
    aperture_pix = CircularAperture(positions_pix, r_circle)

    mask_circle = aperture_pix.to_mask(method='center')
    mask_data = mask_circle[0].multiply(imdata)

    #    phot_table = aperture_photometry(imdata, aperture,wcs=wcs)
    phot_table = aperture_photometry(imdata, aperture_pix)
    #    print(phot_table)
    #    print(phot_table.colnames)
    #   print(phot_table['sky_center'])
    #    print(phot_table['xcenter'])
    #    print(phot_table['ycenter'])
    aper_sum = phot_table['aperture_sum']
    phot_table[
        'aperture_sum'].info.format = '%.8g'  # for consistent table output
    phot_table['xcenter'].info.format = '%.8g'  # for consistent table output
    phot_table['ycenter'].info.format = '%.8g'  # for consistent table output
Example No. 18
                                                 unpack=True)
i_iraf, ie_iraf, if_iraf, isky_iraf = np.loadtxt('phot_test_i.txdump',
                                                 usecols=(1, 2, 3, 4),
                                                 unpack=True)

# now try python
x, y = np.loadtxt(coords_file, usecols=(0, 1), unpack=True)
positions = np.array(zip(x, y))

hdu_g = fits.open(fits_g)
hdu_i = fits.open(fits_i)

apertures = CircularAperture(positions, r=8.)
annulus_apertures = CircularAnnulus(positions, r_in=10., r_out=14.)
print apertures.area()
ap_mask = apertures.to_mask(method='subpixel', subpixels=7)
dummy = np.ones_like(hdu_g[0].data)
ann_mask = annulus_apertures.to_mask(method='center')
ap_g = [m.apply(hdu_g[0].data) for i, m in enumerate(ap_mask)]
ap_i = [m.apply(hdu_i[0].data) for i, m in enumerate(ap_mask)]
area_g = [np.sum(m.apply(dummy)) for i, m in enumerate(ap_mask)]
area_i = [np.sum(m.apply(dummy)) for i, m in enumerate(ap_mask)]

print area_g, area_i
# plt.imshow(ap_g[0], interpolation='nearest')
# plt.show()
ann_g = [
    m.apply(hdu_g[0].data, fill_value=-999.) for i, m in enumerate(ann_mask)
]
ann_i = [
    m.apply(hdu_i[0].data, fill_value=-999.) for i, m in enumerate(ann_mask)
Example No. 19
def photom_av(ima, pos, radius, r_in=False, r_out=False, mode='median'):
    '''
    Aperture photometry in apertures located at pixel coordinates
    pos = ((x0, y0), (x1, y1), ...) with a radius ``radius``.
    When r_in and r_out are given, the background is estimated in a CircularAnnulus and subtracted.

    mode refers to how the background is estimated within the circular annulus.
    Can be 'median' or 'mean'.

    Photometry is calculated by median-averaging the pixels within the aperture and
    multiplying by the number of pixels in the aperture (including fractions of pixels).
    '''
    # Setting up the bad-pixel mask
    if hasattr(ima, 'mask'):
        if ima.mask.size == 1:
            mask = np.zeros(ima.shape, dtype=bool) | ima.mask
        else:
            mask = ima.mask.copy()
    else:
        mask = np.zeros(ima.shape, dtype=bool)

    ### Performing the actual photometry - identical for each method
    # Median averaging of flux in aperture
    # Setting up the aperture
    apertures = CircularAperture(pos, r=radius)
    ap_mask = apertures.to_mask(method='center')
    # Setting up arrays to store data
    nflx = len(ap_mask)
    flx = np.zeros(nflx, dtype=float)
    flux_max = np.zeros(nflx, dtype=float)
    flux_min = np.zeros(nflx, dtype=float)
    # Median averaging of flux
    for i, am in enumerate(ap_mask):
        fluxmask = ~mask & am.to_image(shape=mask.shape).astype(bool)
        flx[i] = np.median(ima[fluxmask])
        flux_max[i] = np.max(ima[fluxmask])
        flux_min[i] = np.min(ima[fluxmask])

    # Aperture photometry on the mask to see how many masked pixels are in the
    # aperture
    apm = aperture_photometry(mask.astype(int), apertures)
    # Number of unmasked pixels in aperture
    ap_area = Column(name='area_aper',
                     data=apertures.area() - apm['aperture_sum'].data)

    # Flux in aperture using median av flux and fractional no. pixels in aperture
    flux_init = flx * ap_area
    flux = flux_init  # default: no background subtraction

    ### Two different modes for analysing the background
    if (r_in and r_out and mode in ('mean', 'median')):

        ### This stuff is the same regardless of method
        # Setting up the annulus
        anulus_apertures = CircularAnnulus(pos, r_in=r_in, r_out=r_out)
        # Performing annulus photometry on the mask
        bkgm = aperture_photometry(mask.astype(int), anulus_apertures)
        # Number of masked pixels in bkg
        mbkg_area = Column(name='bpix_bkg',
                           data=bkgm['aperture_sum'])
        # Number of non-masked pixels in aperture and bkg
        bkg_area = Column(name='area_bkg',
                          data=anulus_apertures.area() - bkgm['aperture_sum'])

        ### This stuff is specific to the mean
        if mode == 'mean':
            # Perform the annulus photometry on the image
            bkg = aperture_photometry(ima, anulus_apertures, mask=mask)
            # Average bkg where this divides by only number of NONMASKED pixels
            # as the aperture photometry ignores the masked pixels
            bkga = Column(name='background',
                          data=bkg['aperture_sum'] / bkg_area)
            # Bkg subtracted flux
            flux = flux_init - bkga * ap_area
            # Adding that data to the mask photometry table
            apm.add_column(bkga)

        elif mode == 'median':
            # Number of pixels in the annulus, a different method
            aperture_mask = anulus_apertures.to_mask(method='center')
            nbkg = len(aperture_mask)

            # Background mask
            bkgm = np.zeros(nbkg, dtype=float)

            # Median averaging
            for i, am in enumerate(aperture_mask):
                bmask = ~mask & am.to_image(shape=mask.shape).astype(bool)
                bkgm[i] = np.median(ima[bmask])

            flux = flux_init - bkgm * ap_area
            bkgm = Column(name='background', data=bkgm)

    return flux, apm, flx, ap_area, flux_max, flux_min  # flux, no. masked pixels in ap, median av flux
Example No. 20
def photometry(gal_name, source_find):

	# open fits file as 2D array
	os.chdir(gal_name)
	fits_name = '%s.cutout.pbcor.fits' % gal_name
	hdu = fits.open(fits_name)
	data = hdu[0].data[0][0]

	# open text file containing the scaled MAD and beam area saved by generate_cleans.statistics()
	with open('text/stdev.txt', 'r') as f:
		std_dev = float(f.readline())  # Jy/beam
	with open('text/beamarea.txt', 'r') as f:
		beamarea = float(f.readline())  # arcsec^2

	# create error image to pass to astropy photometry to calculate flux errors
	ones = np.ones_like(data)
	error_image = std_dev*ones

	# Flux density = Sum of pixels in aperture / number of pixels per beam
	# Pixels per beam = angular area of beam / angular area of one pixel
	pix_per_beam = beamarea/(cell_size**2)

	# aperture radius in pixels
	aper_radius = aperture_size/cell_size

	# number of beams in the aperture
	beams_per_aper = np.pi*aperture_size**2/beamarea

	# flux error calculation
	flux_error = np.sqrt(beams_per_aper)*std_dev/2



	# center of image
	positions = [(100, 100)]

	# If you don't know proper aperture size, let the function find the optimal one, create aperture object
	if make_growth_curves:
		aper_radius = find_aperture_size(positions, data)
	apertures = CircularAperture(positions, r=aper_radius)

	# make mask image where pixels outside aperture are zero, then find the maximum value in the aperture
	mask = apertures.to_mask(method='center')[0].to_image((201, 201))
	masked_data = np.multiply(data, mask)
	max_val_in_aperture = max(map(max, masked_data))

	with open('text/stdev.txt', 'r') as f_rms:
		rms = float(f_rms.readline())

	# If we are allowing a search for the brightest pixel as center of gaussian fit, check that pixel is not noise spike by verifying it is 3 sigma above noise
	# Otherwise, just set center to centroid of HST image
	if source_find:
		if (max_val_in_aperture > 3.*rms):
			#print(gal_name)
			# get coordinates of maximum point
			y_max = np.where(data == max_val_in_aperture)[0][0]
			x_max = np.where(data == max_val_in_aperture)[1][0]
			#print(x_max, y_max)


			positions = [(x_max, y_max)]
			with open('text/center_radio.txt', 'w') as f_center:
				f_center.write('%s\n' % x_max)
				f_center.write('%s\n' % y_max)
		else:
			# Get sky coordinates of centroid of HST image
			#hdu_hst = fits.open('%s_HST.fits' % gal_name[:5])
			#w_hst = wcs.WCS(hdu_hst[0])
			#trans_hst = w_hst.all_pix2world(1199., 1199., 0)
			#ra_hst, dec_hst = trans_hst[0], trans_hst[1]

			# Save centroid RA, Dec to file
			with open('text/center_radio.txt', 'w') as f_center:
				f_center.write('%s\n' % 100)
				f_center.write('%s\n' % 100)


	apertures = CircularAperture(positions, r=aper_radius)

	# Do background subtraction with an annulus if desired
	if bkgd_subtract:
		global annuli
		annuli = CircularAnnulus(positions, r_in=20, r_out=30)
		apers = [apertures, annuli]
	else:
		apers = [apertures]

	# Do the photometry, background subtraction if desired
	photo_table = aperture_photometry(data, apers, error=error_image)



	# return all relevant values
	output = []
	if bkgd_subtract:
		bkg_mean = photo_table['aperture_sum_1'] / annuli.area()
		bkg_sum = bkg_mean * apertures.area()
		final_sum = photo_table['aperture_sum_0'] - bkg_sum
		photo_table['residual_aperture_sum'] = final_sum
		flux = (photo_table['residual_aperture_sum']/pix_per_beam)[0]
	else:
		flux = (photo_table['aperture_sum']/pix_per_beam)[0]


	output.append(flux)
	output.append(flux_error)
	output.append(std_dev)
	output.append(pix_per_beam)
	output.append(beams_per_aper)
	output.append(max_val_in_aperture)
	output.append(max_val_in_aperture/std_dev)
	output.append(flux/flux_error)

	os.chdir('..')
	return output
        mag_ann = np.zeros(N_star)
        merr_ann = np.zeros(N_star)

        # aperture sum
        apert_sum = APPHOT(img_uint16, DAOapert,
                           method='exact')['aperture_sum']
        ap_area = DAOapert.area()
        #print(apert_sum)

        apert_result = 'ID, Msky, sky_std, Sky count Pixel_N, Sky reject Pixel_N, mag_ann, merr_ann\n'

        for star_ID in range(0, N_stars)[10:12]:

            # since our `DAOannul` has many elements :
            mask_annul = (DAOannul.to_mask(method='center'))[star_ID]
            mask_apert = (DAOapert.to_mask(method='center'))[star_ID]
            # CAUTION!! YOU MUST USE 'center', NOT 'exact'!!!

            cutimg = mask_annul.cutout(img)
            #cutimg.tofile('{0!s}_DAOstarfinder_Star_Flux_pixel_value_starID_{1:04}.csv'.format(f_name[:-4], star_ID), sep=',')
            df_cutimg = pd.DataFrame(cutimg * 65536.0, dtype=np.uint16)
            df_cutimg.to_csv(
                '{0!s}_DAOstarfinder_Star_Flux_pixel_value_starID_{1:04}.csv'.
                format(f_name[:-4], star_ID))

            cut_apert = mask_apert.cutout(img)
            #cutimg.tofile('{0!s}_DAOstarfinder_Star_Flux_pixel_value_starID_{1:04}.csv'.format(f_name[:-4], star_ID), sep=',')
            df_cut_apert = pd.DataFrame(cut_apert * 65536.0, dtype=np.uint16)
            df_cut_apert.to_csv(
                '{0!s}_DAOstarfinder_Star_apertruer_Flux_pixel_value_starID_{1:04}.csv'
                .format(f_name[:-4], star_ID))
Example No. 22
def object_find(x_centre, y_centre, mask_image_ones):
    radius_list = sp.arange(
        1, 51, 1
    )  #initial radius list to iterate through (anticipate radii won't be higher than 12)
    radius_plot = []  #list to append radius values
    sum_list = []  # list to append cumulative sum
    bkg_list = []  #list to append background values
    bkg_diff = -10  #initial value for bkg_diff
    for radius in radius_list:
        if abs(bkg_diff) < 1 or (bkg_diff *
                                 (-1)) <= 0:  #criteria for adaptive aperture
            break
        else:
            position_centre = (x_centre, y_centre
                               )  #location of centre of aperture
            aperture = CircularAperture(position_centre,
                                        r=radius)  #create aperture object
            #aperture.plot(color='white', lw=2)

            aperture2 = CircularAperture(
                position_centre, r=radius + 1
            )  #create second aperture object to calculate adjusted annulus area, radius is same size as annulus

            annulus_aperture = CircularAnnulus(position_centre,
                                               r_in=radius,
                                               r_out=radius +
                                               1)  #create annulus aperture
            #annulus_aperture.plot(color='red', lw=2)

            apers = [aperture, annulus_aperture]
            mask_image_zeros = 1 - mask_image_ones
            a = 1 - mask_image_ones  #invert mask image matrix for photutils
            mask_global = a.astype(bool)

            phot_table = aperture_photometry(
                image, apers, mask=mask_global
            )  #counts flux in aperture and annulus, stores in phot_table
            mask1 = aperture.to_mask(
                method='center')  #creates new mask object for aperture
            mask = mask1
            image1 = mask.to_image(
                shape=image.shape
            )  #creates corresponding mask matrix of same shape as our image

            aperture_area = aperture.area - np.sum(
                image1 * np.copy(mask_image_zeros)
            )  #adjusted aperture area, removing area of aperture already masked

            mask2 = aperture2.to_mask(method='center')
            mask = mask2
            image2 = mask.to_image(shape=image.shape)
            annulus_area = (
                aperture2.area - np.sum(image2 * np.copy(mask_image_zeros))
            ) - aperture_area  #adjusted annulus, removing area of annulus already masked

            bkg_mean = phot_table['aperture_sum_1'] / annulus_area
            bkg_list.append(
                bkg_mean)  #append bkg mean for each aperture to list
            bkg_sum = bkg_mean * aperture_area  #calculate background contribution for aperture
            final_sum = phot_table[
                'aperture_sum_0'] - bkg_sum  #calculate total contribution with bkg removed
            err_fs = sp.sqrt(
                (final_sum / 3.1) + (bkg_sum / 3.1)
            )  #Poisson error in final sum, added in quadrature with error from background (gain = 3.1)
            phot_table[
                'residual_aperture_sum'] = final_sum  #update phot_table with aperture sum
            sum_list.append(final_sum)
            radius_plot.append(radius)

            if radius == 1:  #only calculate the difference in background between two annuli if radius is greater than 1
                continue
            else:
                bkg_diff = bkg_list[radius - 1] - bkg_list[radius - 2]
                continue

            continue

    masks = aperture.to_mask(method='center')  #creates mask of final aperture
    mask = masks
    image_l = mask.to_image(shape=image.shape)
    b = 1 - image_l  #invert mask matrix

    mask_image_ones = mask_image_ones * b  #add new masked object to global mask so identified object not rediscovered

    if len(radius_plot) < 3:  #avoid cataloguing small objects
        radius = None

    return radius, final_sum, err_fs, mask_image_ones
Example No. 23
def limiting_magnitude_prob(syntax, image, model=None, r_table=None):
    '''
    syntax - dict
        Dictionary of input parameters
    image - np.array
        Image of region of interest with target in center of image
    model - function
        - psf function from autophot
    '''
    try:

        from photutils import CircularAperture
        import matplotlib.pyplot as plt
        import numpy as np
        import matplotlib.gridspec as gridspec
        import random
        from scipy.optimize import curve_fit
        import warnings
        from photutils.datasets import make_noise_image
        # from autophot.packages.functions import mag
        from photutils import DAOStarFinder
        from astropy.stats import sigma_clipped_stats
        # from matplotlib.ticker import MultipleLocator
        from mpl_toolkits.axes_grid1 import make_axes_locatable
        from autophot.packages.rm_bkg import rm_bkg

        from astropy.visualization import ZScaleInterval

        import logging

        logger = logging.getLogger(__name__)

        limiting_mag_figure = plt.figure(figsize=set_size(240, aspect=1.5))

        gs = gridspec.GridSpec(2, 2, hspace=0.5, wspace=0.2)
        ax0 = limiting_mag_figure.add_subplot(gs[:, :-1])

        ax1 = limiting_mag_figure.add_subplot(gs[-1, -1])
        ax2 = limiting_mag_figure.add_subplot(gs[:-1, -1])

        # level for detection - Rule of thumb ~ 5 is a good detection level
        level = syntax['lim_SNR']

        logger.info('Limiting threshold: %d sigma' % level)

        image_no_surface, surface = rm_bkg(image, syntax, image.shape[0] / 2,
                                           image.shape[0] / 2)

        # =============================================================================
        # find and mask sources in close up
        # =============================================================================

        image_mean, image_median, image_std = sigma_clipped_stats(
            image,
            sigma=syntax['source_sigma_close_up'],
            maxiters=syntax['iters'])

        daofind = DAOStarFinder(fwhm=syntax['fwhm'],
                                threshold=syntax['bkg_level'] * image_std)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            # ignore no sources warning
            sources = daofind(image - image_median)

        if sources is not None:
            positions = list(
                zip(np.array(sources['xcentroid']),
                    np.array(sources['ycentroid'])))

            positions.append((image.shape[0] / 2, image.shape[1] / 2))

        else:
            positions = [(image.shape[0] / 2, image.shape[1] / 2)]

        # "size" of source
        source_size = syntax['image_radius']

        pixel_number = int(np.ceil(np.pi * source_size**2))

        # Mask out target region
        mask_ap = CircularAperture(positions, r=source_size)

        mask = mask_ap.to_mask(method='center')

        mask_sumed = [i.to_image(image.shape) for i in mask]

        if len(mask_sumed) != 1:
            mask_sumed = sum(mask_sumed)
        else:
            mask_sumed = mask_sumed[0]

        mask_sumed[mask_sumed > 0] = 1

        logging.info('Number of pixels in star: %d' % pixel_number)

        # Mask out center region
        mask_image = (image_no_surface) * (1 - mask_sumed)

        vmin, vmax = (ZScaleInterval(nsamples=1500)).get_limits(mask_image)

        excluded_points = mask_image == 0
        exclud_x = excluded_points[0]
        exclud_y = excluded_points[1]

        exclud_zip = list(zip(exclud_x, exclud_y))

        included_points = np.where(mask_image != 0)

        includ_x = list(included_points[0])
        includ_y = list(included_points[1])

        includ_zip = list(zip(includ_x, includ_y))

        # ax2.scatter(exclud_y,exclud_x,color ='black',marker = 'X',alpha = 0.5  ,label = 'excluded_pixels',zorder = 1)
        ax2.scatter(includ_y,
                    includ_x,
                    color='red',
                    marker='x',
                    alpha=0.5,
                    label='included_pixels',
                    zorder=2)

        number_of_points = 300

        fake_points = {}

        if len(includ_zip) < pixel_number:
            includ_zip = includ_zip + exclud_zip

        for i in range(number_of_points):
            fake_points[i] = []
            # count = 0
            random_pixels = random.sample(includ_zip, pixel_number)
            xp_ran = [i[0] for i in random_pixels]
            yp_ran = [i[1] for i in random_pixels]

            fake_points[i].append([xp_ran, yp_ran])

        fake_sum = {}
        for i in range(number_of_points):

            fake_sum[i] = []

            for j in fake_points[i]:

                for k in range(len(j[0])):

                    fake_sum[i].append(image_no_surface[j[0][k]][j[1][k]])

        fake_mags = {}

        for f in fake_sum.keys():

            fake_mags[f] = np.sum(fake_sum[f])

# =============================================================================
#     Histogram
# =============================================================================

        hist, bins = np.histogram(list(fake_mags.values()),
                                  bins=len(list(fake_mags.values())),
                                  density=True)

        center = (bins[:-1] + bins[1:]) / 2

        sigma_guess = np.nanstd(list(fake_mags.values()))
        mean_guess = np.nanmean(list(fake_mags.values()))
        A_guess = np.nanmax(hist)

        def gauss(x, a, x0, sigma):
            return a * np.exp(-(x - x0)**2 / (2 * sigma**2))

        popt, pcov = curve_fit(gauss,
                               center,
                               hist,
                               p0=[A_guess, mean_guess, sigma_guess],
                               absolute_sigma=True)

        mean = popt[1]
        std = abs(popt[2])

        logging.info('Mean: %s - std: %s' % (round(mean, 3), round(std, 3)))

        if syntax['probable_detection_limit']:

            beta = float(syntax['probable_detection_limit_beta'])

            def detection_probability(n, sigma, beta):
                from scipy.special import erfinv
                '''

                Probabilistic upper limit computation based on:
                http://web.ipac.caltech.edu/staff/fmasci/home/mystats/UpperLimits_FM2011.pdf

                Assuming a Gaussian noise distribution

                n: commonly used threshold value integer above some background level

                sigma: sigma value from noise distribution found from local area around source

                beta: Detection probability


                '''
                flux_upper_limit = (n +
                                    np.sqrt(2) * erfinv(2 * beta - 1)) * sigma

                return flux_upper_limit

            logging.info("Using Probable detection limit [b' = %d%% ]" %
                         (100 * beta))

            f_ul = mean + detection_probability(level, std, beta)

            logging.info("Flux Upper limit: %.3f" % f_ul)

        else:
            f_ul = abs(mean + level * std)
            logging.info('Detection at %s std: %.3f' % (level, f_ul))

        # =============================================================================
        # Plot histogram of background values
        # =============================================================================

        line_kwargs = dict(alpha=0.5, color='black', ls='--')

        # the histogram of the data
        n, bins, patches = ax0.hist(list(fake_mags.values()),
                                    density=True,
                                    bins=30,
                                    facecolor='blue',
                                    alpha=1,
                                    label='Pseudo-Flux\nDistribution')

        ax0.axvline(mean, **line_kwargs)
        ax0.axvline(mean + 1 * std, **line_kwargs)
        ax0.text(mean + 1 * std,
                 np.max(n),
                 r'$1\sigma$',
                 rotation=-90,
                 va='top')
        ax0.axvline(mean + 2 * std, **line_kwargs)
        ax0.text(mean + 2 * std,
                 np.max(n),
                 r'$2\sigma$',
                 rotation=-90,
                 va='top')

        if syntax['probable_detection_limit']:

            ax0.axvline(f_ul, **line_kwargs)
            ax0.text(f_ul,
                     np.max(n),
                     r"$\beta'$ = %d%%" % (100 * beta),
                     rotation=-90,
                     va='top')

        else:
            ax0.axvline(f_ul, **line_kwargs)
            ax0.text(mean + level * std,
                     np.max(n),
                     r'$' + str(level) + r'\sigma$',
                     rotation=-90,
                     va='top')

        x_fit = np.linspace(ax0.get_xlim()[0], ax0.get_xlim()[1], 250)

        ax0.plot(x_fit, gauss(x_fit, *popt), label='Gaussian Fit', color='red')

        ax0.ticklabel_format(axis='y', style='sci', scilimits=(-2, 0))
        ax0.yaxis.major.formatter._useMathText = True

        ax0.set_xlabel('Pseudo-Flux')
        ax0.set_ylabel('Normalised Probability')

        im2 = ax2.imshow(image - surface,
                         origin='lower',
                         aspect='auto',
                         interpolation='nearest')
        divider = make_axes_locatable(ax2)
        cax = divider.append_axes("right", size="5%", pad=0.05)
        cb = limiting_mag_figure.colorbar(im2, cax=cax)
        cb.ax.set_ylabel('Counts', rotation=270, labelpad=10)

        cb.formatter.set_powerlimits((0, 0))
        cb.ax.yaxis.set_offset_position('left')
        cb.update_ticks()

        ax2.set_title('Image - Surface')

        # =============================================================================
        # Convert counts to magnitudes
        # =============================================================================

        flux = f_ul / syntax['exp_time']

        mag_level = -2.5 * np.log10(flux)

        # =============================================================================
        # We now have an upper and lower estimate of the limiting magnitude
        # =============================================================================
        '''
        Visual display of limiting case

        if PSF model is available use that

        else

        use a gaussian profile with the same number of counts
        '''
        fake_sources = np.zeros(image.shape)

        try:

            if syntax['c_counts']:
                pass

            model_label = 'PSF'

            def mag2image(m):
                '''
                Convert magnitude to height of PSF
                '''
                Amplitude = (syntax['exp_time'] /
                             (syntax['c_counts'] + syntax['r_counts'])) * (
                                 10**(m / -2.5))

                return Amplitude

            # PSF model that matches close-up shape around target
            def input_model(x, y, flux):
                return model(x, y, 0, flux, r_table, pad_shape=image.shape)

        except:
            '''
            if PSF model isn't available - use Gaussian instead

            '''
            logging.info('PSF model not available - Using Gaussian')
            model_label = 'Gaussian'

            sigma = syntax['fwhm'] / (2 * np.sqrt(2 * np.log(2)))  # FWHM to Gaussian sigma

            def mag2image(m):
                '''
                Convert magnitude to height of Gaussian
                '''

                #  Volume/counts under a 2D Gaussian for a magnitude m
                volume = (10**(m / -2.5)) * syntax['exp_time']

                # https://en.wikipedia.org/wiki/Gaussian_function
                Amplitude = volume / (2 * np.pi * sigma**2)

                return Amplitude

            #  Set up grid

            def input_model(x, y, A):

                x = np.arange(0, image.shape[0])
                xx, yy = np.meshgrid(x, x)

                from autophot.packages.functions import gauss_2d, moffat_2d

                if syntax['use_moffat']:
                    model = moffat_2d(
                        (xx, yy), x, y, 0, A,
                        syntax['image_params']).reshape(image.shape)

                else:
                    model = gauss_2d(
                        (xx, yy), x, y, 0, A,
                        syntax['image_params']).reshape(image.shape)

                return model

        # =============================================================================
        #  What magnitude do you want this target to be?
        # =============================================================================

        mag2image = mag2image

        inject_source_mag = mag2image(mag_level)

        # =============================================================================
        # Random well-spaced points to plot
        # =============================================================================

        random_sources = sample_with_minimum_distance(
            n=[int(syntax['fwhm']),
               int(image.shape[0] - syntax['fwhm'])],
            k=syntax['inject_sources_random_number'],
            d=int(syntax['fwhm'] / 2))
        import math

        def PointsInCircum(r, n=100):
            return [(math.cos(2 * math.pi / n * x) * r + image.shape[1] / 2,
                     math.sin(2 * math.pi / n * x) * r + image.shape[0] / 2)
                    for x in range(0, n)]

        random_sources = PointsInCircum(2 * syntax['fwhm'], n=3)
        x = [abs(i[0]) for i in random_sources]
        y = [abs(i[1]) for i in random_sources]

        print(x)
        print(y)

        # =============================================================================
        # Inject sources
        # =============================================================================

        try:
            if syntax['inject_source_random']:

                for i in range(0, len(x)):

                    fake_source_i = input_model(x[i], y[i], inject_source_mag)

                    if syntax['inject_source_add_noise']:

                        nan_idx = np.isnan(fake_source_i)
                        fake_source_i[nan_idx] = 0
                        fake_source_i[fake_source_i < 0] = 0

                        fake_source_i = make_noise_image(
                            fake_source_i.shape,
                            distribution='poisson',
                            mean=fake_source_i,
                            random_state=np.random.randint(0, 1e3))
                        # fake_source_i[nan_idx] = np.nan1

                    fake_sources += fake_source_i
                    ax1.scatter(x[i],
                                y[i],
                                marker='o',
                                s=150,
                                facecolors='none',
                                edgecolors='r',
                                alpha=0.5)
                    ax1.annotate(str(i), (x[i], -.5 + y[i]),
                                 color='r',
                                 alpha=0.5,
                                 ha='center')

            if syntax['inject_source_on_target']:

                fake_source_on_target = input_model(image.shape[1] / 2,
                                                    image.shape[0] / 2,
                                                    inject_source_mag)

                if syntax['inject_source_add_noise']:
                    nan_idx = np.isnan(fake_source_on_target)
                    fake_source_on_target[nan_idx] = 1e-6
                    fake_source_on_target[fake_source_on_target < 0] = 0

                    fake_source_on_target = make_noise_image(
                        fake_source_on_target.shape,
                        distribution='poisson',
                        mean=fake_source_on_target,
                        random_state=np.random.randint(0, 1e3))

                fake_sources += fake_source_on_target

                ax1.scatter(image.shape[1] / 2,
                            image.shape[0] / 2,
                            marker='o',
                            s=150,
                            facecolors='none',
                            edgecolors='black',
                            alpha=0.5)
                ax1.annotate('On\nTarget',
                             (image.shape[1] / 2, -1 + image.shape[0] / 2),
                             color='black',
                             alpha=0.5,
                             ha='center')

            im1 = ax1.imshow(
                image - surface + fake_sources,
                # vmin = vmin,
                # vmax = vmax,
                aspect='auto',
                # norm = norm,
                origin='lower',
                interpolation='nearest')
            ax1.set_title(' Fake [%s] Sources ' % model_label)

        except Exception as e:
            logging.exception(e)
            im1 = ax1.imshow(
                image - surface,
                origin='lower',
                aspect='auto',
            )
            ax1.set_title('[ERROR] Fake Sources [%s]' % model_label)

        # plt.colorbar(im1,ax=ax1)
        divider = make_axes_locatable(ax1)
        cax = divider.append_axes("right", size="5%", pad=0.05)
        cb = limiting_mag_figure.colorbar(im1, cax=cax)
        cb.ax.set_ylabel('Counts', rotation=270, labelpad=10)
        # cb = fig.colorbar(im)
        cb.formatter.set_powerlimits((0, 0))
        cb.ax.yaxis.set_offset_position('left')
        cb.update_ticks()

        ax0.legend(loc='lower center',
                   bbox_to_anchor=(0.5, 1.02),
                   ncol=2,
                   frameon=False)

        limiting_mag_figure.savefig(
            syntax['write_dir'] + 'limiting_mag_porb.pdf',
            # box_extra_artists=([l]),
            bbox_inches='tight',
            format='pdf')
        plt.close('all')

    # master try/except
    except Exception as e:
        print('limit issue')
        logging.exception(e)

    syntax['maglim_mean'] = mean
    syntax['maglim_std'] = std

    return mag_level, syntax
#plt.imshow(data1)
#plt.show()
#plt.imshow(data2)
#plt.show()
#plt.imshow(data3)
#plt.show()

stars = [(4186, 1933), (4134, 1921), (3970, 1902), (3809, 1888), (3758, 1871),
         (2990, 1772), (2343, 1681), (1406, 1571), (899, 1512)]
stars2 = [(item[0] - 41, item[1] - 18) for item in stars]
aperture = CAp(stars, r=8)
aperture2 = CAp(stars2, r=8)
annulus1 = CAn(stars, r_in=12, r_out=18)
annulus2 = CAn(stars2, r_in=12, r_out=18)

aperture_masks1 = aperture.to_mask()
aperture_masks2 = aperture2.to_mask()
star_apertures1 = [
    aperture_masks1[i].multiply(data1) for i in range(0, len(aperture_masks1))
]
star_apertures2 = [
    aperture_masks1[i].multiply(data2) for i in range(0, len(aperture_masks1))
]
star_apertures3 = [
    aperture_masks2[i].multiply(data3) for i in range(0, len(aperture_masks2))
]

#annulus_masks = annulus.to_mask(method = 'center')
#star_annulus = [annulus_masks[i].multiply(data2) for i in range(0,len(annulus_masks))]

#for i in range(0,len(star_apertures1)):
Example No. 25
txdump_out.close()

g_iraf, ge_iraf, gf_iraf, gsky_iraf = np.loadtxt('phot_test_g.txdump', usecols=(1,2,3,4), unpack=True)
i_iraf, ie_iraf, if_iraf, isky_iraf = np.loadtxt('phot_test_i.txdump', usecols=(1,2,3,4), unpack=True)

# now try python
x, y = np.loadtxt(coords_file, usecols=(0,1), unpack=True)
positions = np.array(zip(x,y))

hdu_g = fits.open(fits_g)
hdu_i = fits.open(fits_i)

apertures = CircularAperture(positions, r=8.)
annulus_apertures = CircularAnnulus(positions, r_in=10., r_out=14.)
print apertures.area()
ap_mask = apertures.to_mask(method='subpixel', subpixels=7)
dummy = np.ones_like(hdu_g[0].data)
ann_mask = annulus_apertures.to_mask(method='center')
ap_g = [m.apply(hdu_g[0].data) for i,m in enumerate(ap_mask)]
ap_i = [m.apply(hdu_i[0].data) for i,m in enumerate(ap_mask)]
area_g = [np.sum(m.apply(dummy)) for i,m in enumerate(ap_mask)]
area_i = [np.sum(m.apply(dummy)) for i,m in enumerate(ap_mask)]

print area_g, area_i
# plt.imshow(ap_g[0], interpolation='nearest')
# plt.show()
ann_g = [m.apply(hdu_g[0].data, fill_value=-999.) for i,m in enumerate(ann_mask)]
ann_i = [m.apply(hdu_i[0].data, fill_value=-999.) for i,m in enumerate(ann_mask)]

flux_g = np.array([np.sum(a) for j,a in enumerate(ap_g)])
flux_i = np.array([np.sum(a) for j,a in enumerate(ap_i)])
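
The snippet above is Python 2 and uses the early-photutils ApertureMask.apply method. Below is a rough modern equivalent of the masked-cutout step, assuming Python 3 and a photutils release in which apply has been renamed multiply; x, y and hdu_g are the arrays and HDU list loaded above.

import numpy as np
from photutils import CircularAperture, CircularAnnulus  # photutils.aperture in newer releases

positions = np.column_stack((x, y))      # replaces np.array(zip(x, y)), which breaks on Python 3

apertures = CircularAperture(positions, r=8.0)
annulus_apertures = CircularAnnulus(positions, r_in=10.0, r_out=14.0)

ap_mask = apertures.to_mask(method='subpixel', subpixels=7)
ann_mask = annulus_apertures.to_mask(method='center')

ap_g = [m.multiply(hdu_g[0].data) for m in ap_mask]                          # was m.apply(...)
area_g = [np.sum(m.multiply(np.ones_like(hdu_g[0].data))) for m in ap_mask]
flux_g = np.array([np.sum(a) for a in ap_g])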
Example No. 26
def limiting_magnitude_prob(syntax, image, model=None, r_table=None):
    '''
    syntax - dict
        Dictionary of input parameters
    image - np.array
        Image of region of interest with target in center of image
    model - function
        - psf function from autophot
    '''
    try:

        from photutils import CircularAperture
        import matplotlib.pyplot as plt
        import numpy as np
        import matplotlib.gridspec as gridspec
        import random
        from astropy.stats import SigmaClip
        from photutils import Background2D, MedianBackground
        from scipy.optimize import curve_fit
        import warnings

        from autophot.packages.functions import r_dist

        import logging

        logger = logging.getLogger(__name__)

        # level for detection - Rule of thumb ~ 5 is a good detection level
        level = syntax['lim_SNR']

        # Lower_level
        low_level = 3

        logger.info('Limiting threshold: %d sigma' % level)

        if syntax['psf_bkg_surface']:

            sigma_clip = SigmaClip(sigma=syntax['lim_SNR'])
            bkg_estimator = MedianBackground()
            bkg = Background2D(image, (3, 3),
                               filter_size=(4, 4),
                               sigma_clip=sigma_clip,
                               bkg_estimator=bkg_estimator)

            surface = bkg.background

            image_no_surface = image - surface

        if syntax['psf_bkg_poly']:

            from astropy.modeling import models, fitting
            surface_function_init = models.Polynomial2D(
                degree=syntax['psf_bkg_poly_degree'])

            fit_surface = fitting.LevMarLSQFitter()

            x = np.arange(0, image.shape[0])
            y = np.arange(0, image.shape[0])
            xx, yy = np.meshgrid(x, y)

            with warnings.catch_warnings():

                # Ignore model linearity warning from the fitter
                warnings.simplefilter('ignore')
                surface_fit = fit_surface(surface_function_init, xx, yy, image)

            surface = surface_fit(xx, yy)
            image_no_surface = image - surface

        if syntax['psf_bkg_local']:

            surface = np.ones(image.shape) * np.nanmedian(image)
            image_no_surface = image - np.nanmedian(image)

        # "size" of source
        source_size = 1.3 * syntax['image_radius']

        # Mask out target region
        mask_ap = CircularAperture([image.shape[0] / 2, image.shape[1] / 2],
                                   r=source_size)

        mask = mask_ap.to_mask(method='center')
        logging.info('Number of pixels in star: %d' %
                     np.sum(mask.to_image(image.shape)))

        # Mask out center region
        mask_image = (image_no_surface) * (1 - mask.to_image(image.shape))

        number_of_points = 1000

        fake_points = {}
        for i in range(number_of_points):
            fake_points[i] = []
            for j in range(int(np.sum(mask.to_image(image.shape)))):

                xp_ran = random.randint(0, int(image.shape[0] - 1))
                yp_ran = random.randint(0, int(image.shape[1] - 1))

                rp = r_dist(image.shape[0] / 2, xp_ran, image.shape[0] / 2,
                            yp_ran)

                if rp < source_size:

                    r_ran = random.randint(int(source_size - rp),
                                           int(image.shape[0] / 2 - rp - 1))
                    theta_ran = random.uniform(0, 2 * np.pi)
                    xp_ran = r_ran * np.cos(theta_ran) + xp_ran
                    yp_ran = r_ran * np.sin(theta_ran) + yp_ran
                fake_points[i].append((int(xp_ran), int(yp_ran)))

        fake_sum = {}
        for i in range(len(fake_points.keys())):
            fake_sum[i] = []
            list_tmp = []
            for j in fake_points[i]:

                list_tmp.append(mask_image[j[0]][j[1]])

            fake_sum[i] = list_tmp

        fake_mags = {}

        for f in fake_sum.keys():

            fake_mags[f] = np.sum(fake_sum[f])

        hist, bins = np.histogram(list(fake_mags.values()),
                                  bins=len(list(fake_mags.values())),
                                  density=True)
        center = (bins[:-1] + bins[1:]) / 2

        sigma = np.nanstd(list(fake_mags.values()))
        mean = np.nanmean(list(fake_mags.values()))
        A = np.nanmax(hist)

        def gauss(x, a, x0, sigma):
            return a * np.exp(-(x - x0)**2 / (2 * sigma**2))

        popt, pcov = curve_fit(gauss,
                               center,
                               hist,
                               p0=[A, mean, sigma],
                               absolute_sigma=True)

        mean = popt[1]
        std = abs(popt[2])

        logging.info('Mean: %s - std: %s' % (round(mean, 3), round(std, 3)))
        logging.info('Detection at %s std: %s' %
                     (level, round(mean + level * std, 3)))

        limiting_mag_figure = plt.figure(figsize=set_size(540, aspect=1))
        gs = gridspec.GridSpec(2, 2)
        ax0 = limiting_mag_figure.add_subplot(gs[:, :-1])

        ax1 = limiting_mag_figure.add_subplot(gs[-1, -1])
        ax2 = limiting_mag_figure.add_subplot(gs[:-1, -1])

        line_kwargs = dict(alpha=0.5, color='black', ls='--')

        # the histogram of the data
        n, bins, patches = ax0.hist(list(fake_mags.values()),
                                    density=True,
                                    bins=30,
                                    facecolor='blue',
                                    alpha=1,
                                    label='Pseudo-Flux Distribution')

        ax0.axvline(mean, **line_kwargs)
        ax0.axvline(mean + 1 * std, **line_kwargs)
        ax0.text(mean + 1 * std, np.max(n), r'$1\sigma$')
        ax0.axvline(mean + 2 * std, **line_kwargs)
        ax0.text(mean + 2 * std, np.max(n), r'$2\sigma$')
        ax0.axvline(mean + level * std, **line_kwargs)
        ax0.text(mean + level * std, np.max(n), r'$' + str(level) + r'\sigma$')

        x_fit = np.linspace(ax0.get_xlim()[0], ax0.get_xlim()[1], 1000)

        ax0.plot(x_fit, gauss(x_fit, *popt), label='Gaussian Fit', color='red')

        ax0.ticklabel_format(axis='y', style='sci', scilimits=(-2, 0),
                             useMathText=True)

        ax0.set_xlabel('Pseudo-Flux')
        ax0.set_ylabel('Normalised Probability')

        im2 = ax2.imshow(image - surface, origin='lower')
        plt.colorbar(im2, ax=ax2)
        ax2.set_title('Image - Surface')

        x = random.sample(range(0, int(image.shape[0])),
                          int(image.shape[0]) // 3)
        y = random.sample(range(0, int(image.shape[1])),
                          int(image.shape[0]) // 3)

        counts = abs(mean + level * std)

        flux = counts / syntax['exp_time']

        mag_level = -2.5 * np.log10(flux)

        # Worse case scenario

        counts = abs(mean + low_level * std)

        low_flux = counts / syntax['exp_time']

        low_mag_level = -2.5 * np.log10(low_flux)

        try:

            model_label = 'PSF'

            def mag2image(m):
                return (syntax['exp_time'] /
                        (syntax['c_counts'] + syntax['r_counts'])) * (10**(
                            m / -2.5))

            def input_model(x, y, f):

                return model(x,
                             y,
                             0,
                             f,
                             r_table,
                             pad_shape=image.shape,
                             slice_scale=None)

            fake_sources = input_model(x[0], y[0], mag2image(mag_level))
            ax1.scatter(x[0],
                        y[0],
                        marker='o',
                        s=150,
                        facecolors='none',
                        edgecolors='r',
                        alpha=0.5)

        except Exception:
            logging.info('PSF model not available - Using Gaussian')
            model_label = 'Gaussian'
            x_grid = np.arange(0, image.shape[0])
            xx, yy = np.meshgrid(x_grid, x_grid)

            def mag2image(flux):
                # FWHM = 2*sqrt(2*ln 2)*sigma, so divide by that factor
                sigma = syntax['fwhm'] / (2 * np.sqrt(2 * np.log(2)))
                # Peak amplitude of a circular 2-D Gaussian with this total flux
                # (assumes gauss_2d takes the peak value as its amplitude argument)
                Amplitude = flux / (2 * np.pi * sigma ** 2)
                return Amplitude

            def input_model(x, y, f):

                return gauss_2d((xx, yy), x, y, 0, f,
                                syntax['fwhm'] / (2 * np.sqrt(2 * np.log(2))))

            fake_sources = input_model(x[0], y[0], mag2image(counts))
            ax1.scatter(x[0],
                        y[0],
                        marker='o',
                        s=150,
                        facecolors='none',
                        edgecolors='r',
                        alpha=0.5)

        try:
            for i in range(1, len(x)):
                fake_sources += input_model(x[i], y[i], mag2image(mag_level))
                ax1.scatter(x[i],
                            y[i],
                            marker='o',
                            s=150,
                            facecolors='none',
                            edgecolors='r',
                            alpha=0.5)

            im1 = ax1.imshow(image - surface + fake_sources, origin='lower')
            ax1.set_title('Fake Sources [%s]' % model_label)

        except Exception as e:
            logging.exception(e)
            im1 = ax1.imshow(image - surface, origin='lower')
            ax1.set_title('[ERROR] Fake Sources [%s]' % model_label)

        plt.colorbar(im1, ax=ax1)

        ax0.legend(loc='upper left', frameon=False)

        limiting_mag_figure.savefig(syntax['write_dir'] +
                                    'limiting_mag_porb.pdf',
                                    format='pdf')
        plt.close(limiting_mag_figure)

    except Exception as e:
        logging.exception(e)

    return mag_level, low_mag_level
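The lines above convert the n-sigma point of the fitted pseudo-flux distribution into a limiting magnitude. A minimal sketch of that conversion, with hypothetical argument names (exp_time is the exposure time in seconds; zp is an optional placeholder zero point, since the routine above returns a purely instrumental value):

import numpy as np

def flux_threshold_to_mag(mean, std, level, exp_time, zp=0.0):
    # n-sigma detection threshold in counts, converted to counts per second
    counts = abs(mean + level * std)
    flux = counts / exp_time
    # instrumental limiting magnitude, offset by the placeholder zero point
    return zp - 2.5 * np.log10(flux)

# e.g. flux_threshold_to_mag(mean=12.0, std=40.0, level=3, exp_time=60.0)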
Example No. 27
def computeRadialNoise(cube,
                       loc,
                       vels,
                       waves_dat,
                       temp_wavs,
                       temp_flux,
                       window_size,
                       order,
                       aperture_size=[5],
                       skip_pixels=8):
    apertures = CircularAperture(loc, r=skip_pixels)
    mask = np.zeros(cube[0, :, :].shape)
    angs = np.arange(0, 2 * np.pi, 2 * np.pi / 360.0)
    temp_w = temp_wavs
    temp_f = temp_flux
    r, theta = rec2polar(loc[0], loc[1])
    for k in range(len(angs)):
        i = angs[k]
        x, y = polar2rec(r, theta + i)
        if ((x > apertures.bbox.ixmin) & (x < apertures.bbox.ixmax) &
                (y > apertures.bbox.iymin) & (y < apertures.bbox.iymax)):
            #print("hit")
            mask[x, y] = -1
        else:
            mask[x, y] = 1
    locs_noise = np.where(mask == 1)
    sps = []
    #posns=np.random.randint(0,np.max(locs_noise),10)
    gif_path = "test.gif"
    frames_path = "pixels/im_"
    masks = []
    #sps_final=[]
    for epoch in range(15):
        mask_clone = np.zeros(mask.shape)
        posns = np.random.randint(0, len(locs_noise[0]), 1)
        #print(locs_noise)
        #posnx=np.random.randint(min(locs_noise[0]),max(locs_noise[0]),1)
        #posny=np.random.randint(min(locs_noise[1]),max(locs_noise[1]),1)
        mask_clone[locs_noise[0], locs_noise[1]] = 1
        mask_clone[locs_noise[0][posns], locs_noise[1][posns]] = 5

        #plt.pcolormesh(mask_clone)
        for i in range(len(posns)):
            if len(aperture_size) > 1:

                sp, apertures = measureSpatialSpec(
                    cube, [locs_noise[0][posns[i]], locs_noise[1][posns[i]]],
                    aperture_size)
                apmask = apertures.to_mask()
                mask_clone = mask_clone + np.transpose(
                    apmask.to_image(shape=mask_clone.shape))
            else:
                sp = cube[:, locs_noise[0][posns[i]], locs_noise[1][posns[i]]]
            t = temp_f
            w = temp_w
            CC = CrossCorr(vels)
            mean_n, n, snn = CC.compareFluxes(waves_dat,
                                              sp,
                                              temp_wavs,
                                              temp_flux,
                                              window_size=window_size,
                                              order=order)
            # n.append(n1
            sps.append(np.std(mean_n))
            masks.append(mask_clone)
        #sp_inter=np.mean(sps,axis=0)
        #sps_final.append(sp_inter)
    sp_final = np.mean(sps)
    #with imageio.get_writer(gif_path, mode='I',fps=3) as writer:
    ##   for i in range(30):
    #     print(frames_path.format(i=i))
    #    writer.append_data(imageio.imread(frames_path+"{i}.jpg".format(i=i)))

    #print(np.std(sp_final))
    return np.mean(sps), n, masks  #,sps
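computeRadialNoise relies on rec2polar and polar2rec helpers that are not included in this example. A plausible sketch, assuming both work in pixel coordinates measured from the array origin and return values suitable for indexing the mask:

import numpy as np

def rec2polar(x, y):
    # rectangular pixel coordinates to (radius, angle)
    return np.hypot(x, y), np.arctan2(y, x)

def polar2rec(r, theta):
    # polar coordinates back to integer pixel indices
    return int(round(r * np.cos(theta))), int(round(r * np.sin(theta)))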
Example No. 28
        pommap2040=ndimage.convolve(pommap2040, kern, mode='constant', cval=0.0)
        pommap2040[np.where(pommap2040>1.0)]=1.0

        #Merge the GR150C and GR150R images and shift if necessary when placing within the full detector
        if k==0:
            pommap[4+yoff:2044+yoff,4+xoff:2044+xoff]=pommap2040
            pomintens[4:2044,4:2044]=grismminusmeasuredata[k]*pommap[4:2044,4:2044]
        else:
            pommap[4+yoff:2044+yoff,4+xoff:2044+xoff]=np.maximum(pommap[4+yoff:2044+yoff,4+xoff:2044+xoff],pommap2040)        
            pomintens[4:2044,4:2044]=np.maximum(pomintens[4:2044,4:2044],(grismminusmeasuredata[k]*pommap[4:2044,4:2044]))

    #Set constant high value in central regions of coronagraphic spots as too noisy to measure in flats 
    #Iterate over the four spots
    for k in range(4):
        coroaperture = CircularAperture(corocen[k,:], r=corodarkradius[k])
        coro_obj_mask = coroaperture.to_mask(method='center')
        coro_obj_mask_corr=2.0*coro_obj_mask.data
        pomintens[corocenfloor[k,1]-corodarkradius[k]:corocenfloor[k,1]+corodarkradius[k]+1,corocenfloor[k,0]-corodarkradius[k]:corocenfloor[k,0]+corodarkradius[k]+1]+=coro_obj_mask_corr

    #Replace pixels with intensity>0.98 to 1.00 as even though centres of spots in CV3 had values of a few percent this was probably scattered
    w=np.where(pomintens>0.98)
    pomintens[w]=1.00

    #Interpolate across bad pixels if at least 3 out of 4 corner neighbours are in POM mask
    pomintensfixbadpix=deepcopy(pomintens)
    for j in range(numbadpix):
        y=donotuseindices[0][j]
        x=donotuseindices[1][j]
        #Do not include reference pixels
        if y>3 and y<2044 and x>3 and x<2044:
            neighborsumpommap=pommap[y-1,x-1]+pommap[y-1,x+1]+pommap[y+1,x-1]+pommap[y+1,x+1]
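            #neighborsumpommap counts how many of the four diagonal neighbours of this
            #bad pixel fall inside the POM mask (0 to 4); per the comment above, the
            #truncated continuation interpolates across the pixel only when at least
            #3 of those 4 neighbours are in the mask.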
Example No. 29
def maskstarsPSF(image: np.ndarray, objs: List, header, skyCount: float,
                 numSigmas=5., adaptive=True, sky_err=0.0) -> np.ndarray:
    '''Use the PSF to estimate stars radius, then masks them out.

    Parameters
    ----------

    image : np.ndarray
        Image that is to be masked of nuisance stars.

    objs : List[float, float, str, float]
        List of objects. [RA, DEC, type, psfMag_r]

    header : astropy.io.fits.header.Header
        The header of the current image. Contains information on PSF and
        various other parameters.

    skyCount : float
        Sky background in counts.

    numSigmas : float, optional
        Number of sigmas that the star's radius should extend to.

    adaptive : bool, optional
        If True, grow each star's mask beyond the PSF-based radius using
        _calculateAdaptiveRadius on the local image and sky level.

    sky_err : float, optional
        Error on the sky background estimate, passed to
        _calculateAdaptiveRadius.

    Returns
    -------

    mask : np.ndarray
        Array that masks stars on original image.

    '''

    sigma1 = header["PSF_S1"]          # Sigma of Gaussian fit of PSF
    sigma2 = header["PSF_S2"]          # Sigma of Gaussian fit of PSF
    expTime = header["EXPTIME"]        # Exposure time of image
    aa = header["PHT_AA"]              # zero point
    kk = header["PHT_KK"]              # extinction coefficient
    airMass = header["AIRMASS"]        # airmass
    softwareBias = header["SOFTBIAS"]  # software bias added to pixel counts
    b = header["PHT_B"]                # softening parameter

    skyCount -= softwareBias

    # https://classic.sdss.org/dr7/algorithms/fluxcal.html#counts2mag
    factor = 0.4*(aa + kk*airMass)
    skyFluxRatio = ((skyCount) / expTime) * 10**(factor)
    skyMag = -(2.5 / np.log(10.)) * (np.arcsinh((skyFluxRatio) / (2*b)) + np.log(b))
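    # SDSS asinh ("luptitude") magnitude: m = -(2.5/ln 10) * [asinh((f/f0)/(2b)) + ln b],
    # which approaches the usual -2.5*log10(f/f0) for fluxes well above the softening
    # parameter b.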

    with warnings.catch_warnings():
        # ignore invalid card warnings
        warnings.simplefilter('ignore', category=AstropyWarning)
        wcsFromHeader = wcs.WCS(header)

    # start from a zeros mask; if there are no objects, return a mask of ones that won't interfere with later calculations
    if len(objs) > 0:
        mask = np.zeros_like(image)
    else:
        mask = np.ones_like(image)

    for obj in objs:
        # convert object RA, DEC to pixels
        pos = SkyCoord(obj[0], obj[1], unit="deg")
        pixelPos = wcs.utils.skycoord_to_pixel(pos, wcs=wcsFromHeader)

        # get object psfMag_r
        objectMag = obj[3]

        # calculate radius of star
        sigma = max(sigma1, sigma2)
        radius = numSigmas * _calculateRadius(skyMag, objectMag, sigma)

        # mask out star
        # aps = CircularAperture(pixelPos, r=radius)
        # masks = aps.to_mask(method="subpixel")
        # aperMask = np.where(masks.to_image(image.shape) > 0., 1., 0.)

        newMask = _circle_mask(mask.shape, (pixelPos[1], pixelPos[0]), radius)
        mask = np.logical_or(mask, newMask)
        # mask = np.logical_or(mask, aperMask)
        if adaptive:
            extraExtent = _calculateAdaptiveRadius(mask, pixelPos, image, skyCount, sky_err)

            aps = CircularAperture(pixelPos, r=radius+extraExtent)
            masks = aps.to_mask(method="subpixel")
            aperMask = np.where(masks.to_image(image.shape) > 0., 1., 0.)
            mask = np.logical_or(mask, aperMask)
            # tmp = image*~mask + ((skyCount+1000)*mask)  # not used??

    # invert calculated mask so that future calculations work
    if len(objs) > 0:
        mask = ~mask

    return mask
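The private helpers _calculateRadius and _circle_mask are not included in this example. A hypothetical sketch of _circle_mask, assuming it returns a boolean array that is True inside a circle centred on (row, col):

import numpy as np

def _circle_mask(shape, centre, radius):
    # boolean mask, True inside a circle of the given radius centred on (row, col)
    yy, xx = np.ogrid[:shape[0], :shape[1]]
    return (yy - centre[0]) ** 2 + (xx - centre[1]) ** 2 <= radius ** 2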
Example No. 30
def iraf_style_photometry(phot_apertures,
                          bg_apertures,
                          data,
                          dark_std_data,
                          header,
                          seeing,
                          bg_method='mean',
                          epadu=1.0,
                          gain=8.21,
                          non_linear_threshold=4000):
    """Computes photometry with PhotUtils apertures, with IRAF formulae
    Parameters
    ----------
    phot_apertures : photutils PixelAperture object (or subclass)
        The PhotUtils apertures object to compute the photometry.
        i.e. the object returned via CircularAperture.
    bg_apertures : photutils PixelAperture object (or subclass)
        The photutils aperture object to measure the background in.
        i.e. the object returned via CircularAnnulus.
    data : array
        The data for the image to be measured.
    dark_std_data : array
        Standard-deviation image of the dark frames, passed to the error
        computation.
    header : astropy.io.fits.header.Header
        Header of the image; the exposure time (EXPTIME) is read from it.
    seeing : float
        Seeing estimate in pixels, used as the initial width when
        interpolating bad pixels with a 2D Gaussian fit.
    bg_method : {'mean', 'median', 'mode'}, optional
        The statistic used to calculate the background.
        All measurements are sigma clipped.
        NOTE: From DAOPHOT, mode = 3 * median - 2 * mean.
    epadu : float, optional
        Gain in electrons per adu (only use if image units aren't e-).
    gain : float, optional
        Detector gain used together with non_linear_threshold to flag
        pixels that may be in the non-linear regime.
    non_linear_threshold : float, optional
        Count level above which pixels are counted as potentially
        non-linear.
    Returns
    -------
    final_tbl : astropy.table.Table
        An astropy Table with the columns X, Y, flux, flux_error, mag,
        mag_error, background, and interpolation_flag for each of the sources.
    """
    exptime = header['EXPTIME']

    if bg_method not in ['mean', 'median', 'mode']:
        raise ValueError(
            'Invalid background method, choose either mean, median, or mode')

    #Create a list to hold the flux for each source.
    aperture_sum = []
    interpolation_flags = np.zeros(len(phot_apertures.positions), dtype='bool')

    for i in range(len(phot_apertures.positions)):
        pos = phot_apertures.positions[i]
        #Cutout around the source position
        cutout_w = 15
        x_pos = pos[0]
        y_pos = pos[1]
        cutout = data[int((y_pos - cutout_w)):int(y_pos + cutout_w) + 1,
                      int(x_pos - cutout_w):int(x_pos + cutout_w) + 1]
        x_cutout = x_pos - np.floor(x_pos - cutout_w)
        y_cutout = y_pos - np.floor(y_pos - cutout_w)
        ap = CircularAperture((x_cutout, y_cutout), r=phot_apertures.r)

        #Cut out the pixels JUST inside the aperture, and check if there are NaNs there. If so, interpolate over NaNs in the cutout.
        ap_mask = ap.to_mask(method='exact')
        ap_cut = ap_mask.cutout(cutout)

        non_linear_sum = np.sum(ap_cut / gain > non_linear_threshold)

        # if non_linear_sum > 0:
        #     print('Pixels in the non-linear range!')
        #     breakpoint()

        bad_sum = np.sum(np.isnan(ap_cut))

        if bad_sum > 0:
            bads = np.where(np.isnan(cutout))
            bad_dists = np.sqrt((bads[0] - y_cutout)**2 +
                                (bads[1] - x_cutout)**2)

            #Check if any bad pixels fall within the aperture. If so, set the interpolation flag to True for this source.
            if np.sum(bad_dists < phot_apertures.r + 1):

                # if np.sum(bad_dists < 1) == 0:
                #     #ONLY interpolate if bad pixels lay away from centroid position by at least a pixel.
                #     #2D gaussian fitting approach
                #     #Set up a 2D Gaussian model to interpolate the bad pixel values in the cutout.
                #     model_init = models.Const2D(amplitude=np.nanmedian(cutout))+models.Gaussian2D(amplitude=np.nanmax(cutout), x_mean=x_cutout, y_mean=y_cutout, x_stddev=seeing, y_stddev=seeing)
                #     xx, yy = np.indices(cutout.shape) #2D grids of x and y coordinates
                #     mask = ~np.isnan(cutout) #Find locations where the cutout has *good* values (i.e. not NaNs).
                #     x = xx[mask] #Only use coordinates at these good values.
                #     y = yy[mask]
                #     cutout_1d = cutout[mask] #Make a 1D cutout using only the good values.
                #     fitter = fitting.LevMarLSQFitter()
                #     model_fit = fitter(model_init, x, y, cutout_1d) #Fit the model to the 1d cutout.
                #     cutout[~mask] = model_fit(xx,yy)[~mask] #Interpolate the pixels in the cutout using the 2D Gaussian fit.
                #     pdb.set_trace()
                #     #TODO: interpolate_replace_nans with 2DGaussianKernel probably gives better estimation of *background* pixels.
                # else:
                #     interpolation_flags[i] = True

                #2D gaussian fitting approach
                #Set up a 2D Gaussian model to interpolate the bad pixel values in the cutout.
                model_init = models.Const2D(
                    amplitude=np.nanmedian(cutout)) + models.Gaussian2D(
                        amplitude=np.nanmax(cutout),
                        x_mean=x_cutout,
                        y_mean=y_cutout,
                        x_stddev=seeing,
                        y_stddev=seeing)
                xx, yy = np.indices(
                    cutout.shape)  #2D grids of x and y coordinates
                mask = ~np.isnan(
                    cutout
                )  #Find locations where the cutout has *good* values (i.e. not NaNs).
                x = xx[mask]  #Only use coordinates at these good values.
                y = yy[mask]
                cutout_1d = cutout[
                    mask]  #Make a 1D cutout using only the good values.
                fitter = fitting.LevMarLSQFitter()
                model_fit = fitter(model_init, x, y,
                                   cutout_1d)  #Fit the model to the 1d cutout.

                # #Uncomment this block to show inerpolation plots.
                # norm = ImageNormalize(cutout, interval=ZScaleInterval())
                # plt.ion()
                # fig, ax = plt.subplots(1, 4, figsize=(10,4), sharex=True, sharey=True)
                # ax[0].imshow(cutout, origin='lower', norm=norm)
                # ax[0].set_title('Data')
                # ax[1].imshow(model_fit(xx,yy), origin='lower', norm=norm)
                # ax[1].set_title('2D Gaussian Model')

                cutout[~mask] = model_fit(
                    xx, yy
                )[~mask]  #Interpolate the pixels in the cutout using the 2D Gaussian fit.

                # ax[2].imshow(cutout, origin='lower', norm=norm)
                # ax[2].set_title('Data w/ Bad\nPixels Replaced')
                # ax[3].imshow(cutout-model_fit(xx,yy), origin='lower')
                # ax[3].set_title('Residuals')
                # pdb.set_trace()

                interpolation_flags[i] = True

                # #Gaussian convolution approach.
                # cutout = interpolate_replace_nans(cutout, kernel=Gaussian2DKernel(x_stddev=0.5))

        phot_source = aperture_photometry(cutout, ap)
        # if np.isnan(phot_source['aperture_sum'][0]):
        #     pdb.set_trace()

        aperture_sum.append(phot_source['aperture_sum'][0])

    #Add positions/fluxes to a table
    xcenter = phot_apertures.positions[:, 0] * u.pix
    ycenter = phot_apertures.positions[:, 1] * u.pix
    phot = QTable([xcenter, ycenter, aperture_sum],
                  names=('xcenter', 'ycenter', 'aperture_sum'))

    #Now measure the background around each source.
    mask = make_source_mask(
        data, nsigma=3, npixels=5, dilate_size=7
    )  #Make a mask to block out any sources that might show up in the annuli and bias them.
    bg_phot = aperture_stats_tbl(
        ~mask * data, bg_apertures, sigma_clip=True
    )  #Pass the data with sources masked out to the bg calculator.
    ap_area = phot_apertures.area

    bg_method_name = 'aperture_{}'.format(bg_method)
    background = bg_phot[bg_method_name]
    flux = phot['aperture_sum'] - background * ap_area

    # Need to use variance of the sources for Poisson noise term in error computation.
    flux_error = compute_phot_error(flux, bg_phot, bg_method, ap_area, exptime,
                                    dark_std_data, phot_apertures, epadu)

    mag = -2.5 * np.log10(flux)
    mag_err = 1.0857 * flux_error / flux
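    # 1.0857 = 2.5 / ln(10): converts the fractional flux error into a magnitude error.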

    # Make the final table
    X, Y = phot_apertures.positions.T
    stacked = np.stack([
        X, Y, flux, flux_error, mag, mag_err, background, interpolation_flags
    ],
                       axis=1)
    names = [
        'X', 'Y', 'flux', 'flux_error', 'mag', 'mag_error', 'background',
        'interpolation_flag'
    ]

    final_tbl = Table(data=stacked, names=names)

    #Check for nans
    if sum(np.isnan(final_tbl['flux'])) > 0:
        bad_locs = np.where(np.isnan(final_tbl['flux']))[0]
        #pdb.set_trace()

    return final_tbl
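compute_phot_error is not shown in this example. The IRAF/DAOPHOT error model it presumably follows combines source Poisson noise, pixel-to-pixel sky scatter inside the aperture, and the uncertainty of the sky estimate itself. A minimal sketch with hypothetical argument names (the real function also receives dark_std_data, the exposure time, and the aperture object, which are omitted here):

import numpy as np

def iraf_phot_error(flux, bg_std, ap_area, nsky, epadu=1.0):
    # source shot noise (counts scaled by the gain in e-/ADU)
    poisson_term = flux / epadu
    # pixel-to-pixel scatter of the sky, summed over the aperture
    sky_scatter = ap_area * bg_std ** 2
    # uncertainty of the mean sky level, scaled to the aperture area
    sky_mean_err = ap_area ** 2 * bg_std ** 2 / nsky
    return np.sqrt(poisson_term + sky_scatter + sky_mean_err)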
Example No. 31
    def do_detection(self):
        """Flag outlier pixels in DQ of input images."""
        self.build_suffix(**self.outlierpars)
        self._convert_inputs()

        pars = self.outlierpars
        save_intermediate_results = pars['save_intermediate_results']

        # Start by performing initial TSO Photometry on stack of DataModels
        # TODO:  need information about the actual source position in
        # TSO imaging mode (for all subarrays).
        # Meanwhile, this is a placeholder representing the geometric
        # center of the image.
        nints, ny, nx = self.inputs.data.shape
        xcenter = (nx - 1) / 2.
        ycenter = (ny - 1) / 2.

        # all radii are in pixel units
        if self.inputs.meta.instrument.pupil == 'WLP8':
            radius = 50
            radius_inner = 60
            radius_outer = 70
        else:
            radius = 3
            radius_inner = 4
            radius_outer = 5

        apertures = CircularAperture((xcenter, ycenter), r=radius)
        aperture_mask = apertures.to_mask(method='center')[0]
        # This mask has 1 for mask region, 0 for outside of mask
        median_mask = aperture_mask.to_image((ny, nx))
        inv_median_mask = np.abs(median_mask - 1)
        # Perform photometry
        catalog = tso_aperture_photometry(self.inputs, xcenter, ycenter,
                                          radius, radius_inner, radius_outer)

        # Extract net photometry for the source
        # This will be the value used for scaling the median image within
        # the aperture region
        phot_values = catalog['net_aperture_sum']

        # Convert CubeModel into ModelContainer of 2-D DataModels
        for image in self.input_models:
            image.wht = resample_utils.build_driz_weight(
                image, weight_type='exptime', good_bits=pars['good_bits'])

        # Initialize intermediate products used in the outlier detection
        input_shape = self.input_models[0].data.shape
        median_model = datamodels.ImageModel(init=input_shape)
        median_model.meta = deepcopy(self.input_models[0].meta)
        base_filename = self.inputs.meta.filename
        median_model.meta.filename = self.make_output_path(
            basepath=base_filename, suffix='median')

        # Perform median combination on set of drizzled mosaics
        median_model.data = self.create_median(self.input_models)
        aper2 = CircularAnnulus((xcenter, ycenter),
                                r_in=radius_inner,
                                r_out=radius_outer)

        tbl1 = aperture_photometry(median_model.data,
                                   apertures,
                                   error=median_model.data * 0.0 + 1.0)
        tbl2 = aperture_photometry(median_model.data,
                                   aper2,
                                   error=median_model.data * 0.0 + 1.0)

        aperture_sum = u.Quantity(tbl1['aperture_sum'][0])
        annulus_sum = u.Quantity(tbl2['aperture_sum'][0])
        annulus_mean = annulus_sum / aper2.area()
        aperture_bkg = annulus_mean * apertures.area()
        median_phot_value = aperture_sum - aperture_bkg

        if save_intermediate_results:
            log.info("Writing out MEDIAN image to: {}".format(
                median_model.meta.filename))
            median_model.save(median_model.meta.filename)

        # Scale the median image by the initial photometry (only in aperture)
        # to create equivalent of 'blot' images
        # Area outside of aperture in median will remain unchanged
        blot_models = datamodels.ModelContainer()
        for i in range(nints):
            scale_factor = float(phot_values[i] / median_phot_value)
            scaled_image = datamodels.ImageModel(init=median_model.data.shape)
            scaled_image.meta = deepcopy(median_model.meta)
            scaled_data = (median_model.data * (scale_factor * median_mask) +
                           (median_model.data * inv_median_mask))
            scaled_image.data = scaled_data
            blot_models.append(scaled_image)

        if save_intermediate_results:
            log.info("Writing out Scaled Median images...")

            def make_output_path(ignored, idx=None):
                output_path = self.make_output_path(
                    basepath=base_filename,
                    suffix='blot',
                    idx=idx,
                    component_format='_{asn_id}_{idx}')
                return output_path

            blot_models.save(make_output_path)

        # Perform outlier detection using statistical comparisons between
        # each original input image and its blotted version of the median image
        self.detect_outliers(blot_models)

        # clean-up (just to be explicit about being finished
        # with these results)
        del median_model, blot_models
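The calls to to_mask(method='center')[0], apertures.area() and aper2.area() above assume an older photutils API; newer releases return a single ApertureMask for a scalar position and expose area as a property rather than a method. A small version-tolerant sketch:

try:
    from photutils.aperture import CircularAperture  # newer photutils layout
except ImportError:
    from photutils import CircularAperture           # older top-level import

apertures = CircularAperture((10.0, 10.0), r=3.0)

masks = apertures.to_mask(method='center')
aperture_mask = masks[0] if isinstance(masks, list) else masks  # list only in old photutils
ap_area = apertures.area() if callable(apertures.area) else apertures.area  # method only in old photutils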
Example No. 32
 def circular_mask(self, radius=4):
     circ = CircularAperture(self.centroid, radius)
     circ_mask = circ.to_mask()[0].to_image(self.image.shape).astype(bool)
     return circ_mask
Example No. 33
def rm_bkg(source,
                 syntax,
                 xc=0,
                 yc=0):

    import logging
    import warnings
    import numpy as np
    from astropy.stats import SigmaClip
    from photutils import Background2D, MedianBackground,MADStdBackgroundRMS, BkgIDWInterpolator
    from autophot.packages.aperture import ap_phot
    from astropy.modeling import models, fitting
    from photutils import CircularAperture
    from photutils import SExtractorBackground

    logger = logging.getLogger(__name__)

    # try:
    #     source_size = syntax['image_radius']
    # except:
    source_size = 2*syntax['fwhm']


    try:
        if syntax['psf_bkg_surface'] and not syntax['psf_bkg_poly']:

            sigma_clip = SigmaClip(sigma=syntax['lim_SNR'])

            bkg_estimator = MADStdBackgroundRMS()


            positions = [source.shape[0]/2,source.shape[1]/2]
            aperture = CircularAperture(positions, r=source_size)
            masks = aperture.to_mask(method='center')

            mask_array = masks.to_image(shape=((source.shape[0], source.shape[1])))

            bkg = Background2D(source,
                               box_size = (3, 3),
                               mask = mask_array,
                               filter_size=(3, 3),
                               sigma_clip=sigma_clip,
                               bkg_estimator=SExtractorBackground(),
                               interpolator= BkgIDWInterpolator(),
                               # edge_method = 'pad'
                               )


            surface = bkg.background

            # bkg_median = np.nanmedian(bkg.background_median)

            source_bkg_free = source - surface

        elif syntax['psf_bkg_poly']:


            surface_function_init = models.Polynomial2D(degree=syntax['psf_bkg_poly_degree'])

            fit_surface = fitting.LevMarLSQFitter()

            x = np.arange(0,source.shape[0])
            y = np.arange(0,source.shape[0])
            xx,yy= np.meshgrid(x,y)

            positions = [source.shape[0]/2,source.shape[1]/2]
            aperture = CircularAperture(positions, r=source_size)
            masks = aperture.to_mask(method='center')

            mask_array = masks.to_image(shape=((source.shape[0], source.shape[1])))

            # Blank out the source region (assignment, not comparison) before fitting
            source[mask_array.astype(bool)] = np.nan


            with warnings.catch_warnings():
                # Ignore model linearity warning from the fitter
                warnings.simplefilter('ignore')
                surface_fit = fit_surface(surface_function_init, xx, yy, source)

            surface = surface_fit(xx,yy)

            # bkg_median = np.nanmedian(surface)
            source_bkg_free = source - surface


        elif syntax['psf_bkg_local']:

            pos = list(zip([xc],[yc]))

            ap,bkg = ap_phot(pos,
                             source,
                             radius = syntax['ap_size'] * syntax['fwhm'],
                             r_in   = syntax['r_in_size'] * syntax['fwhm'],
                             r_out  = syntax['r_out_size'] * syntax['fwhm'])

            surface = np.ones(source.shape) * bkg

            source_bkg_free = source - surface


    except Exception as e:
        logger.exception(e)

    return source_bkg_free,surface
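A hedged usage sketch for rm_bkg, with illustrative values for the syntax keys the function reads (this assumes the autophot package providing rm_bkg and its ap_phot dependency is importable, and that the installed photutils matches the single-mask to_mask behaviour the example expects):

import numpy as np

syntax = {
    'psf_bkg_surface': True,   # fit a Background2D surface
    'psf_bkg_poly': False,     # alternatively, fit a 2-D polynomial
    'psf_bkg_local': False,    # or subtract a single local annulus estimate
    'psf_bkg_poly_degree': 2,
    'lim_SNR': 3,
    'fwhm': 4.5,
    'ap_size': 1.7,
    'r_in_size': 2.0,
    'r_out_size': 3.0,
}

cutout = np.random.normal(100.0, 5.0, (25, 25))  # fake source cutout
cutout_bkg_free, surface = rm_bkg(cutout, syntax)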
Example No. 35
def object_find(x_centre, y_centre, mask_image_ones):
    radius_list = sp.arange(
        1, 51, 1
    )  #initial radius list to iterate through (anticipate radii won't be higher than 12)
    radius_plot = []  #list to append radius values
    sum_list = []  # list to append cumulative sum
    bkg_list = []  #list to append background values
    err_fs_list = []  #list to append error in final sum
    bkg_mean_error_list = []  #list to append bkg mean error
    bkg_diff = -10  #initial value for bkg_diff
    for radius in radius_list:
        if abs(bkg_diff) < 1 or bkg_diff >= 0:  #criteria for adaptive aperture
            break
        else:
            position_centre = (x_centre, y_centre
                               )  #location of centre of aperture
            aperture = CircularAperture(position_centre,
                                        r=radius)  #create aperture object
            #aperture.plot(color='white', lw=2)  #overplot the aperture circle onto a colormap plot (need to produce a plot before running function)

            aperture2 = CircularAperture(
                position_centre, r=radius + 1
            )  #create second aperture object to calculate adjusted annulus area, radius is same size as annulus

            annulus_aperture = CircularAnnulus(position_centre,
                                               r_in=radius,
                                               r_out=radius +
                                               1)  #create annulus aperture
            #annulus_aperture.plot(color='red', lw=2) #overplot the annulus circle onto a colormap plot

            apers = [aperture, annulus_aperture]
            mask_image_zeros = 1 - mask_image_ones
            a = 1 - mask_image_ones  #invert mask image matrix for photoutils
            mask_global = a.astype(bool)

            phot_table = aperture_photometry(
                cropped_image, apers, mask=mask_global
            )  #counts flux in aperture and annulus, stores in phot_table
            mask1 = aperture.to_mask(
                method='center')  #creates new mask object for aperture
            mask = mask1
            image1 = mask.to_image(
                shape=cropped_image.shape
            )  #creates corresponding mask matrix of same shape as our image

            aperture_area = aperture.area - np.sum(
                image1 * np.copy(mask_image_zeros)
            )  #adjusted apertrue area, removing area of aperture already masked

            mask2 = aperture2.to_mask(method='center')
            mask = mask2
            image2 = mask.to_image(shape=cropped_image.shape)
            annulus_area = (
                aperture2.area - np.sum(image2 * np.copy(mask_image_zeros))
            ) - aperture_area  #adjusted annulus, removing area of annulus already masked

            bkg_mean = phot_table['aperture_sum_1'] / annulus_area
            bkg_list.append(
                bkg_mean)  #append bkg mean for each aperture to list
            bkg_sum = bkg_mean * aperture_area  #calculate background contribution for aperture

            err_bkg_mean = sp.sqrt(bkg_mean /
                                   3.1)  #error in the mean value of bkg
            bkg_mean_error_list.append(sum(err_bkg_mean))
            err_bkg = sp.sqrt(
                bkg_sum / 3.1
            )  #error in bkg given by poisson statistics (divided by gain = 3.1)

            final_sum = phot_table[
                'aperture_sum_0'] - bkg_sum  #calculate total contribution with bkg removed

            err_fs = sp.sqrt(
                (phot_table['aperture_sum_0']) / 3.1 + err_bkg**2
            )  #poission error in final sum, added in quadrature with error from background(gain = 3.1)
            err_fs_list.append(
                sum(err_fs)
            )  #err_fs is in form of single list, so sum just takes absolute value and appends to main list
            phot_table[
                'residual_aperture_sum'] = final_sum  #update phototable with aperture sum
            sum_list.append(final_sum)
            radius_plot.append(radius)

            # Update the running background difference once two annuli exist
            if radius > 1:
                bkg_diff = bkg_list[radius - 1] - bkg_list[radius - 2]

    masks = aperture.to_mask(method='center')  #creates mask of final aperture
    mask = masks
    image_l = mask.to_image(shape=cropped_image.shape)
    b = 1 - image_l  #invert mask matrix

    mask_image_ones = mask_image_ones * b  #add new masked object to global mask

    def tanhh(x, a, b, c):  #define tanh to fit to cumulative frequency plot
        return c * sp.tanh((x - a) / b)

    if len(radius_plot) > 3:  # avoids catalogueing of high background noise
        popt, pcov = curve_fit(tanhh,
                               sp.ravel(radius_plot),
                               sp.ravel(sum_list),
                               p0=[6, 0.5, 5000])  #returns tanh fit parameters
        r_e = sp.sqrt(sp.diag(pcov))  #error covariance matrix
        radius = np.arctanh(0.5) * abs(popt[1]) + abs(
            popt[0])  #interpolate tanh function to find effective radius
        radius_err = sp.sqrt(
            (np.arctanh(0.5) * r_e[1])**2 +
            r_e[0]**2)  #standard error propagation to find error in radius

    else:
        radius = None
    """
    Produces plots within object_find.
    The following graph is designed for a synthetic 2D Gaussian; the parameters can be adjusted for any galaxy.
    """
    f1, ax1 = plt.subplots()
    plt.title('Galaxy plot', fontweight='bold')
    ax1.set_xlabel('Pixel Radius')
    ax1.set_ylabel('Cumulative Pixel Count')
    ax1.errorbar(radius_plot,
                 sum_list,
                 yerr=err_fs_list,
                 marker='x',
                 label='Cumulative Frequency',
                 linestyle=' ')  #errorbars for final sum
    #print(bkg_list, 'bkg list')

    x_values = sp.arange(1, max(radius_plot), 0.1)

    ax1.plot(x_values,
             tanhh(x_values, *popt),
             label="Tanh Fit",
             color='darkslategrey')  #plot tanh fit
    ax1.vlines(radius,
               0,
               tanhh(radius, *popt),
               linestyles='dashed',
               label='Effective radius = {:.2f}{}{:.2f}'.format(
                   radius, u'\u00b1', radius_err),
               color='darkred')
    ax1.hlines(abs(popt[2] / 2),
               0,
               radius,
               linestyles='dashed',
               label='Half of Total Flux = {:.2e}'.format(popt[2] / 2),
               color='darkcyan')

    plt.xlim([0, 14])
    plt.ylim([0, 600000])
    ax3 = ax1.twinx()
    plt.ylim([3000, 13000])
    ax3.set_ylabel('Background mean')
    ax3.errorbar(radius_plot,
                 bkg_list,
                 yerr=sp.array(bkg_mean_error_list),
                 marker='x',
                 color='red',
                 label='Sky Background',
                 linestyle=' ')  #errorbars for bkg
    ax1.xaxis.grid(True, ls='--', which='minor')
    ax1.yaxis.grid(True, ls='--', which='minor')
    ax1.xaxis.grid(True, ls='-', which='major')
    ax1.yaxis.grid(True, ls='-', which='major')
    ax3.xaxis.grid(True, ls='-', which='major')
    ax3.yaxis.grid(True, ls='-', which='major')
    ax3.set_yticks(
        np.linspace(ax3.get_yticks()[0],
                    ax3.get_yticks()[-1], len(ax1.get_yticks())))
    ax1.xaxis.set_major_locator(MultipleLocator(2))
    ax1.yaxis.set_major_formatter(FormatStrFormatter('%.2e'))
    ax3.yaxis.set_major_formatter(FormatStrFormatter('%.2e'))
    ax1.xaxis.set_minor_locator(MultipleLocator(1))
    ax1.yaxis.set_major_locator(MultipleLocator(100000))
    ax1.yaxis.set_minor_locator(MultipleLocator(50000))
    ax3.yaxis.set_major_locator(MultipleLocator(2000))
    ax3.yaxis.set_minor_locator(MultipleLocator(1000))
    handles1, labels1 = ax1.get_legend_handles_labels()
    handles2, labels2 = ax3.get_legend_handles_labels()

    f1.legend(handles1 + handles2,
              labels1 + labels2,
              fancybox=True,
              bbox_to_anchor=(0.9, 0.5),
              loc='center right')
    f1.savefig('galaxy_plot3')

    return radius, final_sum, mask_image_ones
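The effective radius above comes from inverting the fitted tanh model: c*tanh((r - a)/b) = c/2 gives r = a + b*arctanh(0.5), which is what the arctanh line computes (with abs() guards on the fitted parameters). A quick numerical check with illustrative parameter values:

import numpy as np

a, b, c = 6.0, 0.5, 5000.0       # illustrative tanh fit parameters (popt)
r_eff = a + b * np.arctanh(0.5)  # radius enclosing half of the total flux c
assert np.isclose(c * np.tanh((r_eff - a) / b), c / 2)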