Example #1
def find_sources_array(image,fwhm,show=False):
	if show == True:
		print('Obtaining the point source locations in the FITS image:\n')
		print(image)
		print('\n')
	# im,hdr = fits.getdata(image,header=True) #reading the fits image (data + header)
	# im = np.array(im,dtype='Float64') #transform the data for a matrix
	tam = np.shape(image) #dimension of the matrix
	mean, median, std = sigma_clipped_stats(image, sigma=fwhm, iters=5)
	if show == True:
		print('Mean, Median, STD = \n')
		print(mean, median, std)
		print('\n')
	sources = daofind(image - median,fwhm=fwhm, threshold=5.*std)
	if show == True:
		print('Sources found! \n')
		print(sources)
		print('\n')
		plt.figure()
		plt.imshow(image,origin='lower', cmap=plt.cm.gray,vmin=np.mean(image)-np.std(image),
			vmax=np.mean(image)+np.std(image))
		plt.colorbar()
		plt.scatter(sources['xcentroid'],sources['ycentroid'],color='red')
	# Convert the astropy table into a pandas DataFrame
	data = []
	for i in sources.keys():
		data.append(sources[i])
	data = DataFrame(data).T
	data.columns = sources.keys()
	return data 
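
A minimal usage sketch for the helper above (not part of the original example), assuming the module-level imports it relies on (numpy, photutils' daofind, pandas' DataFrame, matplotlib) are in place; the FITS file name is a placeholder:

# Hypothetical usage sketch.
import numpy as np
from astropy.io import fits

im = np.array(fits.getdata('my_image.fits'), dtype='float64')   # placeholder path
df = find_sources_array(im, fwhm=3.0, show=True)
print(df[['xcentroid', 'ycentroid']].head())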
Example #2
def calc_offset_slow(im0, im1):
    """Find the offset between two images.

    This uses a painfully slow brute force approach. Definitely not recommended.
    Hasn't been tested as a function.
    """

    corr = np.zeros((60, 60))
    for i in range(60):
        for j in range(60):
            corr[i, j] = np.sum(np.maximum(im1, np.roll(np.roll(im0, i-30, axis=0), j-30, axis=1)))

    # Normalize the correlation array so there is one peak, with range 0 .. 1
    corr = corr - np.min(corr)
    corr = np.max(corr) - corr
    corr = corr / np.max(corr)

    # Use DAOfind to locate this peak. Other methods would also work, but this works well, even if overkill.
    s2 = daofind(corr, fwhm=10, threshold=0.8)

    # Now get the offsets. This is in pixels.
    dx = int(np.round(s2['xcentroid'][0]))
    dy = int(np.round(s2['ycentroid'][0]))

    # And roll the star catalog image by the offset found.
    im0_rolled = np.roll(np.roll(im0, dy-30, axis=0), dx-30, axis=1)

    return im0_rolled
Example #3
def convert_rows_to_wv(direct_file, grism_file, rows):
    """ Converts the rows to wavelength bins. 

    Parameters
    ----------
    direct_file : str
        The path to the direct file.
    grism_file : str
        The path to the grism file.
    rows : array
        The array of rows that correspond to the spatial scan.

    Returns
    -------
    wv : array
        The wavelength solution array.
    """

    # Collect data from FITS headers
    with fits.open(grism_file) as hdu:
        hdr = hdu[0].header
        hdr1 = hdu[1].header
        sci_postarg_1 = hdr['POSTARG1']
        sci_postarg_2 = hdr['POSTARG2']
        sci_crpix_1 = hdr1['CRPIX1'] # this isn't a real keyword...
        sci_crpix_2 = hdr1['CRPIX2'] 

    with fits.open(direct_file) as hdu:
        hdr = hdu[0].header
        hdr1 = hdu[1].header
        data = hdu[1].data
        cal_postarg_1 = hdr['POSTARG1']
        cal_postarg_2 = hdr['POSTARG2']
        cal_crpix_1 = hdr1['CRPIX1']
        cal_crpix_2 = hdr1['CRPIX2']


    # Find the central source
    mean, med, std = sigma_clipped_stats(data, sigma=3.0, iters=5)
    sources = daofind(data-med, fwhm=3.0, threshold=5.*std)
    
    source = sources[np.where(sources['flux'] == np.max(sources['flux']))]
    x_cen, y_cen = source['xcentroid'], source['ycentroid']


    # Calculate the offset
    x_offset = sci_crpix_1 - cal_crpix_1 + (sci_postarg_1 - cal_postarg_1)/0.135
    y_offset = sci_crpix_2 - cal_crpix_2 + (sci_postarg_2 - cal_postarg_2)/0.121

    pos_x, pos_y = x_cen + x_offset, y_cen + y_offset

    constants_0 = [8.95E3, 9.35925E-2, 0.0, 0.0, 0.0, 0.0]
    constants_1 = [4.51423E1, 3.17239E-4, 2.17055E-3, -7.42504E-7, 3.4863E-7, 3.09213E-7]

    coords_0 = constants_0[0] + constants_0[1]*pos_x + constants_0[2]*pos_y
    coords_1 = constants_1[0] + constants_1[1]*pos_x + constants_1[2]*pos_y + constants_1[3]*pos_x**2 + constants_1[4]*pos_x*pos_y + constants_1[5]*pos_y**2
    
    wv = coords_0 + coords_1*(rows-pos_x) + pos_y

    return wv
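
A short call sketch for convert_rows_to_wv (a sketch, not from the original source); both FITS paths are placeholders and the standard imports used above are assumed:

# Hypothetical usage sketch.
import numpy as np

rows = np.arange(256)   # spatial-scan rows of interest
wv = convert_rows_to_wv('direct_flt.fits', 'grism_flt.fits', rows)
print(wv.min(), wv.max())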
Example #4
def source_detection(ccd, fwhm=3.0, sigma=3.0, iters=5, threshold=5.0):
    """
    Returns an astropy table containing the position of sources within the image.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData` or similar
        The CCD image object; its ``data`` attribute is used.

    fwhm : float, optional
        Full-width half-max of stars in the image.

    sigma : float, optional
        The number of standard deviations to use as the lower and upper clipping limit.

    iters : int, optional
        The number of iterations to perform sigma clipping.

    threshold : float, optional
        The detection threshold in units of the sigma-clipped standard deviation of the image.

    Returns
    -------
    sources : `~astropy.table.Table`
        An astropy table of the positions of sources in the image.
    """
    data = ccd.data
    mean, median, std = sigma_clipped_stats(data, sigma=sigma, iters=iters)
    sources = daofind(data - median, fwhm=fwhm, threshold=threshold*std)
    return sources
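
A hedged usage sketch for source_detection, assuming the image is loaded into an object with a ``data`` attribute such as astropy's CCDData; the file name and unit are placeholders:

# Hypothetical usage sketch.
from astropy.nddata import CCDData

ccd = CCDData.read('frame.fits', unit='adu')   # placeholder file name
tbl = source_detection(ccd, fwhm=4.0, sigma=3.0, iters=5, threshold=5.0)
print(tbl['xcentroid', 'ycentroid', 'flux'])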
Example #5
def do_photometry(hdu, extensions=None, threshold=5, fwhm=2.5):

    if extensions is None:
        extensions = np.arange(1, len(hdu))
    if not isiterable(extensions):
        extensions = (extensions, )

    output = {}
    for ext in extensions:
        header = hdu[ext].header
        data = hdu[ext].data
        image_wcs = WCS(header)

        background = mad_std(data)

        sources = daofind(data, threshold=threshold * background, fwhm=fwhm)
        positions = (sources['xcentroid'], sources['ycentroid'])
        sky_positions = pixel_to_skycoord(*positions, wcs=image_wcs)

        apertures = CircularAperture(positions, r=2.)
        photometry_table = aperture_photometry(data, apertures)
        photometry_table['sky_center'] = sky_positions

        output[str(ext)] = photometry_table

    return output
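
A minimal usage sketch for do_photometry (an illustration, not from the original repo), assuming the names the function relies on (WCS, mad_std, daofind, pixel_to_skycoord, CircularAperture, aperture_photometry, isiterable) are imported, and a multi-extension FITS file whose name is a placeholder:

# Hypothetical usage sketch.
from astropy.io import fits

with fits.open('multi_ext_image.fits') as hdu:   # placeholder file name
    tables = do_photometry(hdu, extensions=[1], threshold=5, fwhm=2.5)
print(tables['1'])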
Example #6
def find_source_daofind(frame, ellipse, config, track_record, special=False):
    """
    This function ...
    :param data:
    :return:
    """

    # TODO: FIX THIS FUNCTION
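    # NOTE: 'data' and 'plot' below are not defined in this scope; the function
    # parameter is called 'frame', so this body will raise a NameError as written.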

    sigma_level = 5.0

    # Calculate the sigma-clipped statistics of the data
    mean, median, std = sigma_clipped_stats(data, sigma=3.0)

    result_table = daofind(data - median,
                           fwhm=3.0,
                           threshold=sigma_level * std)

    result_table.rename_column('xcentroid', 'x_peak')
    result_table.rename_column('ycentroid', 'y_peak')

    # If requested, make a plot with the source(s) indicated
    if plot:
        plotting.plot_peaks(data,
                            result_table['x_peak'],
                            result_table['y_peak'],
                            radius=4.0)

    # Return the list of source positions
    #return result_table, median

    source = []
    return source
Example #7
def find_source_daofind(frame, ellipse, config, track_record, special=False):

    """
    This function ...
    :param data:
    :return:
    """

    # TODO: FIX THIS FUNCTION
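    # NOTE: 'data' and 'plot' below are not defined in this scope; the function
    # parameter is called 'frame', so this body will raise a NameError as written.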

    sigma_level = 5.0

    # Calculate the sigma-clipped statistics of the data
    mean, median, std = sigma_clipped_stats(data, sigma=3.0)

    result_table = daofind(data - median, fwhm=3.0, threshold=sigma_level*std)

    result_table.rename_column('xcentroid', 'x_peak')
    result_table.rename_column('ycentroid', 'y_peak')

    # If requested, make a plot with the source(s) indicated
    if plot: plotting.plot_peaks(data, result_table['x_peak'], result_table['y_peak'], radius=4.0)

    # Return the list of source positions
    #return result_table, median

    source = []
    return source
Example #8
def try_dao(fitsfile, outfile='out.png'):

    hdulist = fits.open(fitsfile)
    hdu = hdulist[0]

    if len(hdu.data.shape) == 3:
        data = hdu.data[0]
    else:
        data = hdu.data

    mean, median, std = sigma_clipped_stats(data[800:900, 800:900],
                                            sigma=3.0,
                                            iters=5)
    print(mean, median, std)

    #data = hdu.data
    sources = daofind(data - median, fwhm=3.0, threshold=5. * std)
    print('Found %i sources' % len(sources))

    positions = (sources['xcentroid'], sources['ycentroid'])
    apertures = CircularAperture(positions, r=4.)
    norm = ImageNormalize(stretch=SqrtStretch(), vmin=2000, vmax=3000)
    plt.imshow(data, cmap='Greys', origin='lower', norm=norm)
    plt.title('%i Sources from a single all-sky frame' % len(sources))
    apertures.plot(color='blue', lw=1.5, alpha=0.5)
    plt.savefig(outfile)
Example #9
    def _detect_sources(self):
        from photutils import daofind

        fwhm = 3.
        detection_threshold = 3.
        sources = daofind(self.image,
                          threshold=(self._med + self._std *
                          detection_threshold), fwhm=fwhm)
        pl.plot(sources['xcentroid'], sources['ycentroid'], 'r.')
Example #10
def findStars(image):
    # In order to use the MAD as a consistent estimator for the estimation
    # of the standard deviation σ, one takes σ = k * MAD, where k is a constant
    # scale factor that depends on the distribution. For normally distributed
    # data, k is taken to be approximately 1.4826.
    bkg_sigma = 1.48 * mad(image)
    stars = daofind(image, fwhm=3.0, threshold=5 * bkg_sigma)
    #print stars
    return stars
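
For reference, the 1.48 factor above is an approximation of the Gaussian consistency constant k ≈ 1.4826. A quick sanity-check sketch (not part of the original example):

# Hypothetical check: MAD times ~1.4826 recovers the sigma of Gaussian noise.
import numpy as np
from astropy.stats import median_absolute_deviation as mad

noise = np.random.normal(0.0, 2.0, size=100000)
print(1.4826 * mad(noise))   # expected to be close to 2.0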
Example #11
def make_tweakreg_catalog(model, kernel_fwhm, snr_threshold, sharplo=0.2,
                          sharphi=1.0, roundlo=-1.0, roundhi=1.0):
    """
    Create a catalog of point-like sources to be used for image
    alignment in tweakreg.

    Parameters
    ----------
    model : `ImageModel`
        The input `ImageModel` of a single image.  The input image is
        assumed to be background subtracted.

    kernel_fwhm : float
        The full-width at half-maximum (FWHM) of the 2D Gaussian kernel
        used to filter the image before thresholding.  Filtering the
        image will smooth the noise and maximize detectability of
        objects with a shape similar to the kernel.

    snr_threshold : float
        The signal-to-noise ratio per pixel above the ``background`` for
        which to consider a pixel as possibly being part of a source.

    sharplo : float, optional
        The lower bound on sharpness for object detection.

    sharphi : float, optional
        The upper bound on sharpness for object detection.

    roundlo : float, optional
        The lower bound on roundness for object detection.

    roundhi : float, optional
        The upper bound on roundness for object detection.

    Returns
    -------
    catalog : `~astropy.Table`
        An astropy Table containing the source catalog.
    """

    if not isinstance(model, ImageModel):
        raise ValueError('The input model must be an ImageModel.')

    # threshold = snr_threshold * model.err   # can't input img to daofind
    threshold_img = photutils.detect_threshold(model.data, snr=snr_threshold)
    threshold = threshold_img[0, 0]     # constant image

    sources = photutils.daofind(model.data, fwhm=kernel_fwhm,
                                threshold=threshold, sharplo=sharplo,
                                sharphi=sharphi, roundlo=roundlo,
                                roundhi=roundhi)

    columns = ['id', 'xcentroid', 'ycentroid', 'flux']
    catalog = sources[columns]

    return catalog
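
A hedged call sketch for make_tweakreg_catalog, assuming the JWST pipeline's jwst.datamodels package is installed; the calibrated, background-subtracted file name is a placeholder:

# Hypothetical usage sketch.
from jwst.datamodels import ImageModel

model = ImageModel('jw_example_cal.fits')   # placeholder file name
catalog = make_tweakreg_catalog(model, kernel_fwhm=2.5, snr_threshold=5.0)
print(catalog)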
Example #12
def getcentroids(imagedata,box):
    nx = len(imagedata)
    xmin,xmax,ymin,ymax = box #box borders, eye-balled
    
    if nx < 50:
        rows = np.arange(0,nx)
        centroids = np.zeros((nx,2))
        for i in rows:
            #Index image data within defined box
            image_box = imagedata[i][ymin:ymax, xmin:xmax] 
            #Using DAOFIND to locate the star within the defined box    
            sources = daofind(image_box, threshold=100., fwhm=2.126, exclude_border=True)
            #Add xmin and ymin values onto the found sources for true pixel value
            centroids[i,:] = [sources['xcentroid']+xmin, sources['ycentroid']+ymin]
    else: #Condition applied to part 8
        image_box = imagedata[ymin:ymax, xmin:xmax]
        sources = daofind(image_box, threshold=100., fwhm=2.216, exclude_border=True)
        centroids = [sources['xcentroid']+xmin,sources['ycentroid']+ymin]
    return centroids
Example #13
    def _detect_sources(self):
        from photutils import daofind

        fwhm = 3.
        detection_threshold = 3.
        sources = daofind(self.image,
                          threshold=(self._med +
                                     self._std * detection_threshold),
                          fwhm=fwhm)
        pl.plot(sources['xcentroid'], sources['ycentroid'], 'r.')
Example #14
def find_stars(im):
    "Locate stars in an array, using DAOphot. Returns N x 2 array with xy positions. No magnitudes."
         
    mean, median, std = sigma_clipped_stats(im, sigma=3.0, iters=5)
    sources = daofind(im - median, fwhm=3.0, threshold=5.*std)
    x_phot = sources['xcentroid']
    y_phot = sources['ycentroid']
        
    points_phot = np.transpose((x_phot, y_phot)) # Create an array N x 2

    return points_phot
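
A short usage sketch for find_stars, using the star image bundled with photutils for illustration:

# Hypothetical usage sketch.
from photutils import datasets

im = datasets.load_star_image().data
points = find_stars(im)
print(points.shape)   # (N, 2) array of x, y positions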
Example #15
def starbright(fnstar, fnflat, istar, axs, fg):
    # %% load data
    data = meanstack(fnstar, 100)[0]
    # %% flat field
    flatnorm = readflat(fnflat, fnstar)
    data = (data / flatnorm).round().astype(data.dtype)
    # %% background
    mean, median, std = sigma_clipped_stats(data, sigma=3.0)

    rfact = data.shape[0] // 40
    cfact = data.shape[1] // 40
    bg = Background(data, (rfact, cfact), interp_order=1, sigclip_sigma=3)
    # http://docs.astropy.org/en/stable/units/#module-astropy.units
    # dataphot = (data - bg.background)*u.ph/(1e-4*u.m**2 * u.s * u.sr)
    #   data = (data-0.97*data.min()/bg.background.min()*bg.background) * u.ph/(u.cm**2 * u.s * u.sr)
    data = data * u.ph / (u.cm**2 * u.s * u.sr)
    # %% source extraction
    sources = daofind(data, fwhm=3.0, threshold=5 * std)
    # %% star identification and quantification
    XY = column_stack((sources["xcentroid"], sources["ycentroid"]))
    apertures = CircularAperture(XY, r=4.0)
    norm = ImageNormalize(stretch=SqrtStretch())

    flux = apertures.do_photometry(data, effective_gain=camgain)[0]
    # %% plots
    fg.suptitle("{}".format(fnflat.parent), fontsize="x-large")

    hi = axs[-3].imshow(flatnorm, interpolation="none", origin="lower")
    fg.colorbar(hi, ax=axs[-3])
    axs[-3].set_title("flatfield {}".format(fnflat.name))

    hi = axs[-2].imshow(bg.background, interpolation="none", origin="lower")
    fg.colorbar(hi, ax=axs[-2])
    axs[-2].set_title("background {}".format(fnstar.name))

    hi = axs[-1].imshow(data.value,
                        cmap="Greys",
                        origin="lower",
                        norm=norm,
                        interpolation="none")
    fg.colorbar(hi, ax=axs[-1])
    for i, xy in enumerate(XY):
        axs[-1].text(xy[0],
                     xy[1],
                     str(i),
                     ha="center",
                     va="center",
                     fontsize=16,
                     color="w")
    apertures.plot(ax=axs[-1], color="blue", lw=1.5, alpha=0.5)
    axs[-1].set_title("star {}".format(fnstar.name))

    return flux[istar]
Example #16
def find_sources(imageFile, data, seeing_in_pix, threshold=5.):
    # estimate the 1-sigma noise level using the median absolute deviation of the image
    print "[*] Estimating 1-sigma noise level."
    # generate a mask for 0 pixel counts. These are chip gaps or skycell edges generated by
    # np.nan_to_num and will affect noise level estimate.
    mask = np.where(data != 0)
    bkg_sigma = mad_std(data[mask])
    #print np.median(data), mad(data), bkg_sigma
    # use daofind to detect sources setting
    print "[*] Detecting %d-sigma sources in %s" % (threshold, imageFile)
    sources = daofind(data, fwhm=seeing_in_pix, threshold=threshold*bkg_sigma)
    print "[*] Source detection successful."
    print "\t[i] %d sources detected: " % (len(sources["xcentroid"]))
    print
    print sources
    return sources, bkg_sigma
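
A hedged usage sketch for find_sources (the function above uses Python 2 print statements); the FITS file name is a placeholder:

# Hypothetical usage sketch.
import numpy as np
from astropy.io import fits

image_file = 'skycell.fits'   # placeholder file name
data = np.nan_to_num(fits.getdata(image_file))
sources, bkg_sigma = find_sources(image_file, data, seeing_in_pix=4.0, threshold=5.)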
Example #17
def get_fiber_flux(file,fwhm=20,threshold=100):

    image,header = qi.readimage(file,plot=False)
    mean, median, std = sigma_clipped_stats(image, sigma=3.0)

    source = daofind(image - median, fwhm=fwhm, threshold=threshold*std)
    xcen = np.int(np.round(source['xcentroid'][0]))
    ycen = np.int(np.round(source['ycentroid'][0]))

    plt.plot([xcen],[ycen],'rx',markersize=20)

    params = fitgaussian(image)
    
    y,x = params[1],params[2]

    apdict = tp.optimal_aperture(x,y,image,[150,160])
    return apdict
Example #18
def find_sources(imageFile, data, seeing_in_pix, threshold=5.):
    # estimate the 1-sigma noise level using the median absolute deviation of the image
    print "[*] Estimating 1-sigma noise level."
    # generate a mask for 0 pixel counts. These are chip gaps or skycell edges generated by
    # np.nan_to_num and will affect noise level estimate.
    mask = np.where(data != 0)
    bkg_sigma = mad_std(data[mask])
    #print np.median(data), mad(data), bkg_sigma
    # use daofind to detect sources setting
    print "[*] Detecting %d-sigma sources in %s" % (threshold, imageFile)
    sources = daofind(data,
                      fwhm=seeing_in_pix,
                      threshold=threshold * bkg_sigma)
    print "[*] Source detection successful."
    print "\t[i] %d sources detected: " % (len(sources["xcentroid"]))
    print
    print sources
    return sources, bkg_sigma
Example #19
def starbright(fnstar,fnflat,istar,axs,fg):
    #%% load data
    data = meanstack(fnstar,100)[0]
    #%% flat field
    flatnorm = readflat(fnflat,fnstar)
    data = (data/flatnorm).round().astype(data.dtype)
    #%% background
    mean, median, std = sigma_clipped_stats(data, sigma=3.0)

    rfact=data.shape[0]//40
    cfact=data.shape[1]//40
    bg = Background(data,(rfact,cfact),interp_order=1, sigclip_sigma=3)
# http://docs.astropy.org/en/stable/units/#module-astropy.units
    #dataphot = (data - bg.background)*u.ph/(1e-4*u.m**2 * u.s * u.sr)
 #   data = (data-0.97*data.min()/bg.background.min()*bg.background) * u.ph/(u.cm**2 * u.s * u.sr)
    data = data* u.ph/(u.cm**2 * u.s * u.sr)
    #%% source extraction
    sources = daofind(data, fwhm=3.0, threshold=5*std)
    #%% star identification and quantification
    XY = column_stack((sources['xcentroid'], sources['ycentroid']))
    apertures = CircularAperture(XY, r=4.)
    norm = ImageNormalize(stretch=SqrtStretch())

    flux = apertures.do_photometry(data,effective_gain=camgain)[0]
#%% plots
    fg.suptitle('{}'.format(fnflat.parent),fontsize='x-large')

    hi = axs[-3].imshow(flatnorm,interpolation='none',origin='lower')
    fg.colorbar(hi,ax=axs[-3])
    axs[-3].set_title('flatfield {}'.format(fnflat.name))

    hi = axs[-2].imshow(bg.background,interpolation='none',origin='lower')
    fg.colorbar(hi,ax=axs[-2])
    axs[-2].set_title('background {}'.format(fnstar.name))

    hi = axs[-1].imshow(data.value,
                    cmap='Greys', origin='lower', norm=norm,interpolation='none')
    fg.colorbar(hi,ax=axs[-1])
    for i,xy in enumerate(XY):
        axs[-1].text(xy[0],xy[1], str(i),ha='center',va='center',fontsize=16,color='w')
    apertures.plot(ax=axs[-1], color='blue', lw=1.5, alpha=0.5)
    axs[-1].set_title('star {}'.format(fnstar.name))

    return flux[istar]
Example #20
def process_file(inpath, file_name, t_constant, sigma, fwhm, r, kernel_size, outpath, plot):
    print "Processing " + file_name
    hdulist = fits.open(inpath + file_name)
    image = hdulist[0].data

    if isinstance(sigma, list):
        threshold = calc_sigma(image, sigma[0], sigma[1]) * t_constant
    else:
        threshold = t_constant*sigma

    median_out = signal.medfilt(image,kernel_size)
    median_sub = np.subtract(image,median_out)
    sources = daofind(median_sub, threshold, fwhm)

    sources_2 = np.array(sources["id", "xcentroid", "ycentroid", "sharpness", "roundness1", "roundness2", "npix", "sky", "peak", "flux", "mag"])
    print_line= (file_name+","+str(sources_2))
    base_name = os.path.splitext(file_name)[0]
    file = open(outpath + base_name + ".out", "a")
    file.write(print_line)
    file.close()

    positions = (sources['xcentroid'], sources['ycentroid'])
#    print positions
    apertures = CircularAperture(positions, r)
    phot_table = aperture_photometry(median_sub, apertures)
    phot_table_2 = np.array(phot_table["aperture_sum", "xcenter", "ycenter"])
    print_line= (","+str(phot_table_2)+"\n")
    file = open(outpath + base_name + ".out", "a")
    file.write(print_line)
    file.write("\n")
    file.close()

    hdulist[0].data = median_sub
    hdulist.writeto(outpath + base_name + ".fits")

    if plot:
        median_sub[median_sub<=0]=0.0001
        plt.imshow(median_sub, cmap='gray', origin='lower')
        apertures.plot(color='blue', lw=1.5, alpha=0.5)
        plt.show()
Example #21
def star_detection(filename):
    ''' Detect stars, create scatter plot'''
    image_data = open_fits(filename)
    mean, median, std = sigma_clipped_stats(image_data, sigma=3.0, iters=5)
    print mean, median, std
    sources = daofind(image_data - median, fwhm=3.0, threshold=5.*std)
    print sources
    x_val = [x[1] for x in sources]
    y_val = [x[2] for x in sources]
    pyplot.imshow(image_data, norm=LogNorm())
    pyplot.scatter(x_val, y_val, color='k', marker='o', label='Detected stars')
    pyplot.xlim(1800, 2200)
    pyplot.ylim(1800, 2200)
    pyplot.colorbar()
    pyplot.legend()
    pyplot.title('Star detection')
    pyplot.xlabel('X-axis [pixels]')
    pyplot.ylabel('Y-axis [pixels]')
    pyplot.savefig('star_detection.pdf')
    pyplot.show()
Example #22
def find_stars(image, plot = False, fwhm = 20.0, threshold=3.):

    from astropy.stats import sigma_clipped_stats
    mean, median, std = sigma_clipped_stats(image, sigma=3.0)
    from photutils import daofind
    sources = daofind(image - median, fwhm=fwhm, threshold=threshold*std)
    
   # stars already found accurately, vet_sources will be implemented when working properly
   # vet_sources(10.0,10.0)
        
    if plot == True:
       # from astropy.visualization import SqrtStretch
       # from astropy.visualization.mpl_normalize import ImageNormalize
        positions = (sources['xcentroid'], sources['ycentroid'])
        apertures = CircularAperture(positions, r=4.)
        #norm = ImageNormalize(stretch=SqrtStretch())
        #plt.imshow(image, cmap='Greys', origin='lower', norm=norm)
        qi.display_image(image)
        apertures.plot(color='blue', lw=1.5, alpha=0.5)
        
    return sources
Example #23
def try_dao(fitsfile, outfile='out.png'):
   
    hdulist = fits.open(fitsfile)
    hdu = hdulist[0]

    if len(hdu.data.shape) == 3:
        data = hdu.data[0]
    else:
        data = hdu.data

    mean, median, std = sigma_clipped_stats(data[800:900, 800:900], sigma=3.0, iters=5)
    print(mean, median, std)

    #data = hdu.data
    sources = daofind(data - median, fwhm=3.0, threshold=5.*std)
    print('Found %i sources' % len(sources))

    positions = (sources['xcentroid'], sources['ycentroid'])
    apertures = CircularAperture(positions, r=4.)
    norm = ImageNormalize(stretch=SqrtStretch(), vmin=2000, vmax=3000)
    plt.imshow(data, cmap='Greys', origin='lower', norm=norm)
    plt.title('%i Sources from a single all-sky frame' % len(sources))
    apertures.plot(color='blue', lw=1.5, alpha=0.5)
    plt.savefig(outfile)
Example #24
def make_tweakreg_catalog(model,
                          kernel_fwhm,
                          snr_threshold,
                          sharplo=0.2,
                          sharphi=1.0,
                          roundlo=-1.0,
                          roundhi=1.0):
    """
    Create a catalog of point-like sources to be used for image
    alignment in tweakreg.

    Parameters
    ----------
    model : `ImageModel`
        The input `ImageModel` of a single image.  The input image is
        assumed to be background subtracted.

    kernel_fwhm : float
        The full-width at half-maximum (FWHM) of the 2D Gaussian kernel
        used to filter the image before thresholding.  Filtering the
        image will smooth the noise and maximize detectability of
        objects with a shape similar to the kernel.

    snr_threshold : float
        The signal-to-noise ratio per pixel above the ``background`` for
        which to consider a pixel as possibly being part of a source.

    sharplo : float, optional
        The lower bound on sharpness for object detection.

    sharphi : float, optional
        The upper bound on sharpness for object detection.

    roundlo : float, optional
        The lower bound on roundness for object detection.

    roundhi : float, optional
        The upper bound on roundness for object detection.

    Returns
    -------
    catalog : `~astropy.Table`
        An astropy Table containing the source catalog.
    """

    if not isinstance(model, ImageModel):
        raise ValueError('The input model must be an ImageModel.')

    # threshold = snr_threshold * model.err   # can't input img to daofind
    threshold_img = photutils.detect_threshold(model.data, snr=snr_threshold)
    threshold = threshold_img[0, 0]  # constant image

    sources = photutils.daofind(model.data,
                                fwhm=kernel_fwhm,
                                threshold=threshold,
                                sharplo=sharplo,
                                sharphi=sharphi,
                                roundlo=roundlo,
                                roundhi=roundhi)

    columns = ['id', 'xcentroid', 'ycentroid', 'flux']
    catalog = sources[columns]

    return catalog
Example #25
#    file.close()
#
#    import matplotlib.pylab as plt
#    im2 = image
#    im2[im2<=0]=0.0001
#    plt.imshow(im2, cmap='gray', origin='lower')
#    apertures.plot(color='blue', lw=1.5, alpha=0.5)
#    plt.show()

hdulist = fits.open(inpath+file_name)
image = hdulist[0].data
#image = image.astype(float) - np.median(image)
from photutils import daofind
from astropy.stats import mad_std
bkg_sigma = mad_std(image)
sources = daofind(image, fwhm=fwhm, threshold=threshold*bkg_sigma)
#print_line= (file_name+","+str(sources_2)+"\n")
sources_2 = np.array(sources["id", "xcentroid", "ycentroid", "sharpness", "roundness1", "roundness2", "npix", "sky", "peak", "flux", "mag"])
print_line= (file_name+","+str(sources_2))
file= open(outpath, "a")
file.write(print_line)
file.close()

from photutils import aperture_photometry, CircularAperture
positions = (sources['xcentroid'], sources['ycentroid'])
apertures = CircularAperture(positions, r)
phot_table = aperture_photometry(image, apertures)
phot_table_2 = np.array(phot_table["aperture_sum", "xcenter", "ycenter"])
print_line= (","+str(phot_table_2)+"\n")
file= open(outpath, "a")
file.write(print_line)
Example #26
ax2 = figLook.add_subplot(122)
ax1.imshow(imageShow1, cmap='Greys', norm=LogNorm())
ax1.set_title('%s-band'%hdrShow1['FILTER2'])
ax2.imshow(imageShow2, cmap='Greys', norm=LogNorm())
ax2.set_title('%s-band'%hdrShow2['FILTER2'])

# Move the figure on the screen
thismanager = get_current_fig_manager()
thismanager.window.wm_geometry("+1100+0")
#figLook.canvas.manager.window.SetPosition((500, 0))
#####################################

# Choose how to do source extraction
threshold = thres*std1
#sources = ph.irafstarfind(imageDataRed, threshold=threshold, fwhm=fwhm)#, exclude_border=True) 
sources = ph.daofind(image1, threshold=threshold, fwhm=fwhm, sigma_radius=sigma1)#, exclude_border=True) 
#print(sources)

# Put a mask here that we might not need anymore
Xs = sources['xcentroid']#[np.where(sources['fwhm']<2)]
Ys = sources['ycentroid']#[np.where(sources['fwhm']<2)]

positions = (Xs.data, Ys.data)
apertures = ph.CircularAperture(positions, r=10.)

fig = plt.figure(10, figsize=(12,12))
ax = fig.add_subplot(111)

mutable = {}
fig.canvas.mpl_connect('button_press_event', onclick)
Example #27
for i in range(0, target_b.size - edit):
    image_b, hd1 = fits.getdata(dir_t + target_b[i], header=True)
    hd = hd1['UTC']
    hour_b[i] = float(hd[9:11])
    minute_b[i] = float((hd[11:13])) / 60.0
    second_b[i] = float((hd[13:22])) / 3600.0
    time_b[i] = hour_b[i] + minute_b[i] + second_b[i]

    image_b = (image_b - darkavg) / normflat
    slice_comp1 = image_b[1237:1337, 760:860]  #[795:825,1272:1302]
    slice_target = image_b[650:750, 620:720]  #[660:690,690:720]
    slice_comp2 = image_b[1315:1415, 1330:1430]  #[1365:1395,1350:1380]

    sources_b = daofind(slice_target,
                        fwhm=FWHM,
                        threshold=thresh,
                        exclude_border=True)
    positions_b = zip(sources_b['xcentroid'], sources_b['ycentroid'])
    apertures_b = CircularAperture(positions_b, r=aperture_r)
    phot_table_b = aperture_photometry(slice_target, apertures_b)

    mag_b[i] = np.max(phot_table_b['aperture_sum']) / exposure_time

    sources_b1 = daofind(slice_comp1,
                         fwhm=FWHM,
                         threshold=thresh,
                         exclude_border=True)
    positions_b1 = zip(sources_b1['xcentroid'], sources_b1['ycentroid'])
    apertures_b1 = CircularAperture(positions_b1, r=aperture_r1)
    phot_table_b1 = aperture_photometry(slice_comp1, apertures_b1)
Example #28
    def extension(self,
                  extension_idx,
                  threshold='',
                  FWHM=3.0,
                  sigma=3.0,
                  snr=50.,
                  plot=False):
        '''
    A method to run aperture photometry routines on an individual extension and save the results to the exposure class
    
    Parameters
    ----------
    extension_idx: int
      Index of the extension
    threshold: float (optional)
      The absolute image value above which to select sources
    FWHM: float
      The full width at half maximum
    sigma: float
      Number of standard deviations to use for background estimation
    snr: float
      The signal-to-noise ratio to use in the threshold detection
    plot: bool
      Plot the field with identified sources circled      

    Returns
    -------
    source_list: table
      A source list for the image

    '''

        # Define the data array
        data = self.hdulist[extension_idx].data.astype(np.float)

        # Extract the header and create a WCS object
        hdr = self.hdulist[extension_idx].header
        wcs = WCS(hdr)

        # Estimate the background and background noise
        mean, median, std = sigma_clipped_stats(data, sigma=sigma, iters=5)

        # Calculate the detection threshold and FWHM if not provided
        if not threshold: threshold = np.mean(detect_threshold(data, snr=snr))

        # Print the parameters being used
        for p, v in zip(['mean', 'median', 'std', 'threshold', 'FWHM'],
                        [mean, median, std, threshold, FWHM]):
            print '{!s:10}: {:.3f}'.format(p, v)

        # Subtract background and generate sources list of all detections
        sources = daofind(data - median, threshold, FWHM)

        # Map RA and Dec to pixels
        positions = (sources['xcentroid'], sources['ycentroid'])
        skycoords = pixel_to_skycoord(*positions, wcs=wcs)

        # Calculate magnitudes at given source positions
        apertures = CircularAperture(positions, r=2.)
        photometry_table = aperture_photometry(data, apertures)

        # 'skycoords' IRCS object is problematic for stacking tables so for now we'll just add the ra and dec
        # photometry_table['sky_center'] = skycoords
        photometry_table['ra'], photometry_table[
            'dec'] = skycoords.ra, skycoords.dec

        # Update data in the exposure object
        self.source_table = vstack([self.source_table, photometry_table],
                                   join_type='inner')

        # Plot the sources
        if plot:
            norm = ImageNormalize(stretch=SqrtStretch())
            plt.imshow(data, cmap='Greys', origin='lower', norm=norm)
            apertures.plot(color='blue', lw=1.5, alpha=0.5)

        print '{!s:10}: {}'.format('sources', len(sources))
Example #29
##################################################################################
#                                                                                              +++HR DIAGRAM!+++                                                                                          #
##################################################################################


mosaic_area = fits.getdata(dir + '/Mosaic_B/mosaic_area.fits')
#mosaic area plot:
plt.figure(1) 
plt.imshow(mosaic_area, origin='lower')
plt.xlabel('Pixels', fontsize=15)
plt.ylabel('Pixels', fontsize=15)

mosaic_area_cropped = mosaic_area[400:1490, 380:1610]
raw_cropped = 

sourcesB = daofind(nosky_crop, fwhm=5.0, threshold=1.5*bkg_sigma)


#background subtraction:
#phot_table = aperture_photometry(data - bkg, apertures)
data = 
apertures = CircularAperture(positions, r=3)
annulus_apertures = CircularAnnulus(positions, r_in=6, r_out=8)
rawflux_table = aperture_photometry(data, apertures)
bkgflux_table = aperture_photometry(data, annulus_apertures)
phot_table = hstack([rawflux_table, bkgflux_table], table_names=['raw', 'bkg'])

aperture_area = np.pi*3**2
annulus_area = np.pi*(8**2-6**2)
final_sum = (phot_table['aperture_sum_raw']-phot_table['aperture_sum_bkg'] * aperture_area/annulus_area)
phot_table['residual_aperture_sum'] = final_sum
Example #30
def fit_sources(image1d, psfbase, shape, normperim, medianim, mastermask,
                threshold=12, **kwargs):
    '''find and fit sources in the image

    perform PSF subtraction and then find and fit sources
    see comments in code for details

    Parameters
    ----------
    image1d : ndarray
        flattened, normalized image
    psfbase : ndarray
       2d array of psf templates (PSF library)
    threshold : float
        Detection threshold. Higher numbers find only the stronger sources.
        Experiment to find the right value.
    kwargs : dict or named arguments
        arguments for daofind (fwhm, min and max roundness, etc.)

    Returns
    -------
    fluxes_gaussian : astropy.table.Table
    imag :
        PSF subtracted image
    scaled_im :
        PSF subtracted image in daofind scaling

    '''
    psf_coeff = psf_from_projection(image1d, psfbase)
    im = image1d - np.dot(psfbase, psf_coeff)
    bkg_sigma = 1.48 * mad(im)

    # Do source detection on 2d, scaled image
    scaled_im = remove_normmask(im.reshape((-1, 1)), np.ones(1), np.ones_like(medianim), mastermask).reshape(shape)
    imag = remove_normmask(im.reshape((-1, 1)), normperim, medianim, mastermask).reshape(shape)
    sources = photutils.daofind(scaled_im, threshold=threshold * bkg_sigma, **kwargs)

    if len(sources) == 0:
        return None, imag, scaled_im
    else:
        # insert extra step here to find the brightest source, subtract it and
        # redo the PSF fit or add a PSF model to psfbase to improve the PSF fit
        # I think 1 level of that is enough, no infinite recursion.
        # Idea 1: mask out a region around the source, so that this does not
        #         influence the PSF fit.
        newmask = deepcopy(mastermask).reshape(shape)
        for source in sources:
            sl, temp = overlap_slices(shape, [9,9], [source['xcentroid'], source['ycentroid']])
            newmask[sl[0], sl[1]] = True
        newmask = newmask.flatten()

        psf_coeff = psf_from_projection(image1d[~(newmask[~mastermask])],
                                        psfbase[~(newmask[~mastermask]), :])
        im = image1d - np.dot(psfbase, psf_coeff)
        scaled_im = remove_normmask(im.reshape((-1, 1)), np.ones(1), np.ones_like(medianim), mastermask).reshape(shape)

        imag = remove_normmask(im.reshape((-1, 1)), normperim, medianim, mastermask).reshape(shape)
        # cosmics in the image lead to high points, which means that the
        # average area will be overcorrected
        imag = imag - np.ma.median(imag)
        # do photometry on image in real space

        psf_gaussian = photutils.psf.GaussianPSF(1.8)  # width measured by hand
        # default in photutils is to freeze this stuff, but I disagree
        # psf_gaussian.fixed['sigma'] = False
        # psf_gaussian.fixed['x_0'] = False
        # psf_gaussian.fixed['y_0'] = False
        fluxes_gaussian = photutils.psf.psf_photometry(imag, sources['xcentroid', 'ycentroid'], psf_gaussian)

        '''Estimate flux of Gaussian PSF from A and sigma.

        Should be part of photutils in a more clever (analytic) implementation.
        As long as it's missing there, this crutch is used here.
        '''
        x, y = np.mgrid[-3:3, -4:4]
        amp2flux = np.sum(psf_gaussian.evaluate(x, y, 1, 1, 0, 1.8))  # 1.8 hard-coded above
        fluxes_gaussian.add_column(MaskedColumn(name='flux_fit', data=amp2flux * fluxes_gaussian['amplitude_fit']))

        return fluxes_gaussian, imag, scaled_im
Example #31
def find_ghost_centers(fnlist, tolerance=3, thresh=4, guess=None):
    """This routine finds the most likely optical center for a series of images
    by attempting to match ghost reflections of stars with their stellar
    counterparts. It can be rather slow if the fields are very dense with
    stars, but is also much more likely to succeed in locating the correct
    center in dense fields.

    Images should have already been aperture masked at this point. If they
    haven't, run 'aperture_mask' on them first.

    It is useful if the images have had their seeing FWHMs measured. If they
    have, these values (in pixels) should be in the fits headers as "FPFWHM".
    If not, the FWHM is assumed to be 5 pixels (and this is not ideal).

    The routine creates a number of temporary files while running, which are
    deleted at the end of the routine. Interrupting the routine while running
    is probably not a great idea.

    The routine returns a list of image centers as well as appending these to
    the fits headers as "fpxcen" and "fpycen"

    Inputs:
    fnlist -> List of strings, each one containing the path to a fits image.
              The images should be roughly aligned to one another or the
              routine will not work.
    tolerance -> Optional. How close two pixels can be and be "close enough"
                 Default is 3 pixels.
    thresh -> Optional. Level above sky background variation to look for objs.
              Default is 4 (times SkySigma). Decrease if center positions
              aren't being found accurately. Increase for crowded fields to
              decrease computation time.
    guess -> Optional. If you already have an idea of where the center should
             be, you'd put it here. Should be a 2-long iterable, with guess[0]
             being the X center guess, and guess[1] being the y center guess.

    Outputs:
    xcenlist -> List of image center X coordinates
    ycenlist -> List of image center Y coordinates

    """

    # Get image FWHMs
    fwhm = np.empty(len(fnlist))
    firstimage = FPImage(fnlist[0])
    toggle = firstimage.fwhm
    axcen = firstimage.axcen
    aycen = firstimage.aycen
    arad = firstimage.arad
    firstimage.close()
    if axcen is None:
        exit("Error! Images have not yet been aperture-masked! Do this first!")
    if toggle is None:
        print "Warning: FWHMs have not been measured!"
        print "Assuming 5 pixel FWHM for all images."
        fwhm = 5.*np.ones(len(fnlist))
    else:
        for i in range(len(fnlist)):
            image = FPImage(fnlist[i])
            fwhm[i] = image.fwhm
            image.close()

    # Get sky background levels
    skyavg = np.empty(len(fnlist))
    skysig = np.empty(len(fnlist))
    for i in range(len(fnlist)):
        image = FPImage(fnlist[i])
        skyavg[i], skysig[i], _skyvar = image.skybackground()
        image.close()

    # Identify the stars in each image
    xlists = []
    ylists = []
    maglists = []
    print "Identifying stars and ghosts in each image..."
    for i in range(len(fnlist)):
        xlists.append([])
        ylists.append([])
        maglists.append([])
        image = FPImage(fnlist[i])
        axcen = image.axcen
        aycen = image.aycen
        arad = image.arad
        sources = daofind(image.inty-skyavg[i],
                          fwhm=fwhm[i],
                          threshold=thresh*skysig[i]).as_array()
        for j in range(len(sources)):
            # Masks for center and edge of image
            cenmask = ((sources[j][1]-axcen)**2 +
                       (sources[j][2]-aycen)**2 > (0.05*arad)**2)
            edgemask = ((sources[j][1]-axcen)**2 +
                        (sources[j][2]-aycen)**2 < (0.95*arad)**2)
            if np.logical_and(cenmask, edgemask):
                xlists[i].append(sources[j][1])
                ylists[i].append(sources[j][2])
                maglists[i].append(sources[j][-1])
        image.close()

    if guess is None:
        # Use the found stars to come up with a center guess for each image
        xcen = np.zeros(len(fnlist))
        ycen = np.zeros(len(fnlist))
        goodcen = np.zeros(len(fnlist))
        for i in range(len(fnlist)):
            N = len(xlists[i])
            xcenarray = np.zeros(((N*N-N)/2))
            ycenarray = np.zeros(((N*N-N)/2))
            dist2array = np.zeros(((N*N-N)/2))
            sub1array = np.zeros(((N*N-N)/2))
            index = 0
            # All "possible" centers for all possible pairs of stars
            for j in range(N):
                for k in range(j+1, N):
                    xcenarray[index] = xlists[i][j]+xlists[i][k]
                    ycenarray[index] = ylists[i][j]+ylists[i][k]
                    index = index+1
            xcenarray, ycenarray = 0.5*xcenarray, 0.5*ycenarray
            # Cross check the various possible centers against each other
            for j in range(len(xcenarray)):
                dist2array = ((xcenarray-xcenarray[j])**2 +
                              (ycenarray-ycenarray[j])**2)
                sub1array[j] = np.sum(dist2array < tolerance**2)
            # Determine the locations of the "best" centers.
            bestcenloc = np.where(sub1array == max(sub1array))[0]
            # Now cross check JUST the best ones against each other
            sub1array = np.zeros(len(bestcenloc))
            xcenarray = xcenarray[bestcenloc]
            ycenarray = ycenarray[bestcenloc]
            for j in range(len(bestcenloc)):
                dist2array = ((xcenarray-xcenarray[j])**2 +
                              (ycenarray-ycenarray[j])**2)
                sub1array[j] = np.sum(dist2array < tolerance**2)
            # Again, determine the locations of the "best" centers.
            bestcenloc = np.where(sub1array == max(sub1array))[0]
            xcen[i] = np.average(xcenarray[bestcenloc])
            ycen[i] = np.average(ycenarray[bestcenloc])

        # Cross-check the various image's centers against each other
        for i in range(len(fnlist)):
            dist2array = (xcen-xcen[i])**2 + (ycen-ycen[i])**2
            goodcen[i] = np.sum(dist2array < tolerance**2)

        # Determine where in the arrays the best fitting centers are
        bestcenloc = np.where(goodcen == max(goodcen))[0]
        bestxcen = np.average(xcen[bestcenloc])
        bestycen = np.average(ycen[bestcenloc])

    else:
        # Forced guess:
        bestxcen, bestycen = guess[0], guess[1]

    # Now we want to improve the center for each image using the best guesses
    xcenlist = np.zeros(len(fnlist))
    ycenlist = np.zeros(len(fnlist))
    objxs = []
    objys = []
    ghoxs = []
    ghoys = []
    for i in range(len(fnlist)):
        # Where would the reflected objects be if they exist
        refxlist = 2.*bestxcen - np.array(xlists[i])
        refylist = 2.*bestycen - np.array(ylists[i])
        # Populate lists of objects and ghosts based on this
        objxs.append([])
        objys.append([])
        ghoxs.append([])
        ghoys.append([])
        for j in range(len(xlists[i])):
            dist2list = ((xlists[i] - refxlist[j])**2 +
                         (ylists[i] - refylist[j])**2)
            matchlist = dist2list < tolerance**2
            if np.sum(matchlist) >= 1:
                # We found a match! Now we need to know where the match is
                matchloc = np.where(matchlist == 1)[0][0]
                # Cool, now, is this thing brighter than the ghost?
                if maglists[i][matchloc] < maglists[i][j]:
                    # It is! Record it:
                    objxs[i].append(xlists[i][matchloc])
                    objys[i].append(ylists[i][matchloc])
                    ghoxs[i].append(xlists[i][j])
                    ghoys[i].append(ylists[i][j])
        # Calculate the centers based on the object / ghost coords
        if len(objxs[i]) == 0:
            xcenlist[i] = 0
            ycenlist[i] = 0
        else:
            xcenlist[i] = 0.5*(np.average(objxs[i])+np.average(ghoxs[i]))
            ycenlist[i] = 0.5*(np.average(objys[i])+np.average(ghoys[i]))

    # Fill in the blanks with a "best guess"
    xcenlist[xcenlist == 0] = np.average(xcenlist[xcenlist != 0])
    ycenlist[ycenlist == 0] = np.average(ycenlist[ycenlist != 0])
    xcenlist[np.isnan(xcenlist)] = bestxcen
    ycenlist[np.isnan(ycenlist)] = bestycen

    # Append the values to the image headers
    for i in range(len(fnlist)):
        image = FPImage(fnlist[i], update=True)
        image.xcen = xcenlist[i]
        image.ycen = ycenlist[i]
        image.close()

    # Manually verify ghost centers
    while True:
        yn = raw_input("Manually verify ghost centers? (Recommended) (y/n) ")
        if "n" in yn or "N" in yn:
            break
        elif "y" in yn or "Y" in yn:
            goodtog = verify_center(fnlist,
                                    objxs, objys,
                                    ghoxs, ghoys,
                                    xcenlist, ycenlist)
            if goodtog:
                break
            else:
                exit("Centers not approved!")

    return xcenlist, ycenlist
Example #32
    starCatalog = starCatalog[np.where(theseStars)]

    # Form a "catalog" of position entries for matching
    ra1      = starCatalog['RAJ2000']
    dec1     = starCatalog['DEJ2000']
    catalog1 = SkyCoord(ra = ra1, dec = dec1, frame = 'fk5')
    
    # Read in the image and find stars in the image
    Ifile = (stokesDir + delim +
             '_'.join([thisTarget, thisWaveband, 'I']) + '.fits')
    stokesI   = Image(Ifile)
    mean, median, std = sigma_clipped_stats(stokesI.arr, sigma=3.0, iters=5)
    threshold = median + 3.0*std
    fwhm    = 3.0
    sources = daofind(stokesI.arr, threshold, fwhm, ratio=1.0, theta=0.0,
                      sigma_radius=1.5, sharplo=0.2, sharphi=1.0,
                      roundlo=-1.0, roundhi=1.0, sky=0.0,
                      exclude_border=True)
    
    # Convert source positions to RA and Dec
    wcs      = WCS(stokesI.header)
    ADstars  = wcs.all_pix2world(sources['xcentroid'], sources['ycentroid'], 0)
    catalog2 = SkyCoord(ra = ADstars[0]*u.deg, dec = ADstars[1]*u.deg, frame = 'fk5')
    
    
    ###
    ### This slow, meat-axe method was useful for verification.
    ### It produces the same results as the method below.
    ###
#    # Loop through each of the detected sources, and check for possible confusion
#    keepStars    = []
#    numCat1Match = []
Example #33
### test segmentation
import numpy as np
from photutils import datasets
hdu = datasets.load_star_image()
data = hdu.data[0:400, 0:400]
image = hdu.data.astype(float)
image -= np.median(image)

from photutils import daofind
from astropy.stats import mad_std
from astropy.stats import sigma_clipped_stats
bkg_sigma = mad_std(image)
mean, median, std = sigma_clipped_stats(data, sigma=3.0, iters=5)
print(mean, median, std)
sources = daofind(image, fwhm=4.0, threshold=3.0*bkg_sigma)
print(sources)

from photutils import CircularAperture
from astropy.visualization import SqrtStretch
from astropy.visualization.mpl_normalize import ImageNormalize
import matplotlib.pylab as plt
positions = (sources['xcentroid'], sources['ycentroid'])
apertures = CircularAperture(positions, r=4.)
norm = ImageNormalize(stretch=SqrtStretch())
plt.imshow(data, cmap='Greys', origin='lower', norm=norm)
apertures.plot(color='blue', lw=1.5, alpha=0.5)

#
from photutils.datasets import make_100gaussians_image
data = make_100gaussians_image()
Example #34
def align_norm(fnlist, tolerance=5, thresh=3.5):
    """Aligns a set of images to each other, as well as normalizing the images
    to the same average brightness.

    Both the alignment and normalization are accomplished through stellar
    photometry using the IRAF routine 'daophot'. The centroids of a handful
    of stars are found and used to run the IRAF routine 'imalign'. The
    instrumental magnitudes of the stars are used to determine by how much
    each image must be scaled for the photometry to match across images.

    The images are simply updated with their rescaled, shifted selves. This
    overwrites the previous images and adds the header keyword 'fpphot' to
    the images.

    A handful of temporary files are created during this process, which should
    all be deleted by the routine at the end. But if it is interrupted, they
    might not be.

    If the uncertainty images exist, this routine also shifts them by the same
    amounts as the intensity images, as well as updating the uncertainty values
    for both the new normalization and the uncertainties in normalizing the
    images.

    Inputs:
    fnlist -> List of strings, each the path to a fits image.
    tolerance -> How close two objects can be and still be considered the same
                 object. Default is 5 pixels.
    thresh -> Optional. Level above sky background variation to look for objs.
              Default is 3.5 (times SkySigma). Decrease if center positions
              aren't being found accurately. Increase for crowded fields to
              decrease computation time.

    """

    # Get image FWHMs
    fwhm = np.empty(len(fnlist))
    firstimage = FPImage(fnlist[0])
    toggle = firstimage.fwhm
    axcen = firstimage.axcen
    aycen = firstimage.aycen
    arad = firstimage.arad
    firstimage.close()
    if axcen is None:
        print "Error! Images have not yet been aperture-masked! Do this first!"
        crash()
    if toggle is None:
        print "Warning! FWHMs have not been measured!"
        print "Assuming 5 pixel FWHM for all images."
        for i in range(len(fnlist)):
            fwhm[i] = 5
    else:
        for i in range(len(fnlist)):
            image = FPImage(fnlist[i])
            fwhm[i] = image.fwhm
            image.close()

    # Get sky background levels
    skyavg = np.empty(len(fnlist))
    skysig = np.empty(len(fnlist))
    for i in range(len(fnlist)):
        image = FPImage(fnlist[i])
        skyavg[i], skysig[i], _skyvar = image.skybackground()
        image.close()

    # Identify the stars in each image
    xlists = []
    ylists = []
    print "Identifying stars in each image..."
    for i in range(len(fnlist)):
        xlists.append([])
        ylists.append([])
        image = FPImage(fnlist[i])
        axcen = image.axcen
        aycen = image.aycen
        arad = image.arad
        sources = daofind(image.inty-skyavg[i],
                          fwhm=fwhm[i],
                          threshold=thresh*skysig[i]).as_array()
        for j in range(len(sources)):
            # If the source is not near the center or edge
            centermask = ((sources[j][1]-axcen)**2 +
                          (sources[j][2]-aycen)**2 > (0.05*arad)**2)
            edgemask = ((sources[j][1]-axcen)**2 +
                        (sources[j][2]-aycen)**2 < (0.95*arad)**2)
            if np.logical_and(centermask, edgemask):
                xlists[i].append(sources[j][1])
                ylists[i].append(sources[j][2])
        image.close()

    # Match objects between fields
    print "Matching objects between images..."
    xcoo = []
    ycoo = []
    for i in range(len(xlists[0])):
        # For each object in the first image
        accept = True
        for j in range(1, len(fnlist)):
            # For each other image
            dist2 = ((np.array(xlists[j])-xlists[0][i])**2 +
                     (np.array(ylists[j])-ylists[0][i])**2)
            if (min(dist2) > tolerance**2):
                accept = False
                break
        if accept:
            # We found an object at that position in every image
            xcoo.append(xlists[0][i])
            ycoo.append(ylists[0][i])

    # Create coordinate arrays for the photometry and shifting
    x = np.zeros((len(fnlist), len(xcoo)))
    y = np.zeros_like(x)
    for i in range(len(xcoo)):
        # For every object found in the first image
        for j in range(len(fnlist)):
            # Find that object in every image
            dist2 = ((np.array(xlists[j])-xcoo[i])**2 +
                     (np.array(ylists[j])-ycoo[i])**2)
            index = np.argmin(dist2)
            x[j, i] = xlists[j][index]
            y[j, i] = ylists[j][index]

    # Do aperture photometry on the matched objects
    print "Performing photometry on matched stars..."
    counts = np.zeros_like(x)
    dcounts = np.zeros_like(x)
    for i in range(len(fnlist)):
        image = FPImage(fnlist[i])
        apertures = CircularAperture((x[i], y[i]), r=2*fwhm[i])
        annuli = CircularAnnulus((x[i], y[i]), r_in=3*fwhm[i], r_out=4*fwhm[i])
        phot_table = aperture_photometry(image.inty,
                                         apertures, error=np.sqrt(image.vari))
        sky_phot_table = aperture_photometry(image.inty, annuli,
                                             error=np.sqrt(image.vari))
        counts[i] = phot_table["aperture_sum"] / apertures.area()
        counts[i] -= sky_phot_table["aperture_sum"] / annuli.area()
        counts[i] *= apertures.area()
        dcounts[i] = phot_table["aperture_sum_err"] / apertures.area()
        image.close()

    # Calculate the shifts and normalizations
    norm, dnorm = calc_norm(counts, dcounts)
    for i in range(x.shape[1]):
        x[:, i] = -(x[:, i] - x[0, i])
        y[:, i] = -(y[:, i] - y[0, i])
    xshifts = np.average(x, axis=1)
    yshifts = np.average(y, axis=1)

    # Normalize the images and put shifts in the image headers
    for i in range(len(fnlist)):
        image = FPImage(fnlist[i], update=True)
        image.phottog = "True"
        image.dnorm = dnorm[i]
        image.inty /= norm[i]
        image.vari = image.vari/norm[i]**2
        image.xshift = xshifts[i]
        image.yshift = yshifts[i]
        image.close()

    return
Example #35
		(hours, mins, secs) = ultracamutils.timedeltaHoursMinsSeconds(timeLeft)
		timeLeftString = str(hours).zfill(2) + ":" + str(mins).zfill(2) + ":" + str(secs).zfill(2)
		
		ccdFrame = rdat()
		
		statusString = "\r%s Frame: [%d/%d]"%(timeLeftString, trueFrameNumber, frameRange)
		sys.stdout.write(statusString)
		sys.stdout.flush()
		
		windows = ccdFrame[0]
		
		for windowIndex, w in enumerate(windows):
			image = w._data
			allWindows[windowIndex].addData(image)
			bkg_sigma = 1.48 * mad(image)
			sources = daofind(image, fwhm=4.0, threshold=2.5*bkg_sigma)  
			sources.pprint()
			
			filteredSources = []
				
			
			for index, s in enumerate(sources):
				newSource = {}
				new = True
				newSource = (s['xcentroid'], s['ycentroid'], s['flux'])
				if index==0:
					filteredSources.append(newSource)
				for f in filteredSources:
					if  f[0]==newSource[0] and \
						f[1]==newSource[1] and \
						f[2]==newSource[2]: 
Example #36
    plt.plot(pixel,xx,marker='.', c='r')
'''


'''
    Statistics on the image determine the background noise, standard deviation and mean photon counts,
    which are used to find all objects in the image that are above some threshold and near some FWHM value.
    Typing "sources" (without quotes) in the Spyder terminal will print all of the sources: x, y coords
    of their locations in the image, a peak photon count, and corresponding flux and magnitude values
    for the sources as well.
    
'''

mean, median, std = sigma_clipped_stats(cutdata, sigma =5.0, iters=10)

sources = daofind(cutdata - median, fwhm=targetFWHM, threshold=2.*std)

#sources = sources[(sources['peak'] > 1000.)]



#positions = SC(ra=targetRAdeg * u.deg, dec = targetDECdeg * u.deg, frame='fk5')

positions = (sources['xcentroid'], sources['ycentroid'])

apertures = CircularAperture(positions, r=4.0)

norm = ImageNormalize(stretch=SqrtStretch())


'''Uncomment these plot commands to view the image. plt.imshow plots the whole image, and apertures.plot
   plots circles around all of the things in the image that have been determined to be sources.'''
Example no. 37
0
from photutils import datasets

print "Python version:", sys.version
print "Astropy version:", astropy.__version__

hdu = datasets.load_star_image()   
image = hdu.data[500:700, 500:700]   
image = hdu.data
print np.median(image)
image -= np.median(image)


from photutils import daofind
from astropy.stats import median_absolute_deviation as mad
bkg_sigma = 1.48 * mad(image)   
sources = daofind(image, fwhm=4.0, threshold=3*bkg_sigma)   
print sources

for s in sources:
	print s

figure = matplotlib.pyplot.figure(figsize=(10, 10))
matplotlib.pyplot.title("Sample image")
matplotlib.pyplot.imshow(image, cmap='gray')
#matplotlib.pyplot.gca().invert_yaxis()	
matplotlib.pyplot.show()

ax=figure.add_subplot(1,1,1)
matplotlib.pyplot.axis('off')

extent = ax.get_window_extent().transformed(figure.dpi_scale_trans.inverted())
######################################################################################################### Now let's match to the SDSS Calib Field
Field0 = ascii.read(top+'DR10_SDSS_CALIB_FIELD1_4.csv')
#Field1 = Field0[np.where(Field0['rmag'] < 18)]
Field00 = ascii.read(top+'DR10_SDSS_CALIB_FIELD2_4.csv')
#Field00 = ascii.read(top+'DR10_SDSS_CALIB_FIELD3.csv')
#Field3 = Field2[np.where(Field2['rmag'] < 18)]
######################################################################################################### Start the show

# First let's get the images and the necessary data associated with them
image1, median1, ObjName1, band1, std1, hdr1, w1 = GetImage(image0, mask)
image2, median2, ObjName2, band2, std2, hdr2, w2 = GetImage(image00, mask)
image3, median3, ObjName3, band3, std3, hdr3, w3 = GetImage(image000, mask)

# Grab sources in the first image
threshold = thres*std1
sources = ph.daofind(image1, threshold=threshold, fwhm=fwhm) 
# Filter out sources close to the edges
in_bounds = np.where((sources['xcentroid'] >= 20) & (sources['xcentroid'] <= 1000) &
                     (sources['ycentroid'] >= 20) & (sources['ycentroid'] <= 1000))
sX, sY = sources['xcentroid'][in_bounds], sources['ycentroid'][in_bounds]
Xs1 = sX
Ys1 = sY

# Grab sources in the second image
threshold = thres*std2
sources = ph.daofind(image2, threshold=threshold, fwhm=fwhm)
# Filter out sources close to the edges
in_bounds = np.where((sources['xcentroid'] >= 20) & (sources['xcentroid'] <= 1000) &
                     (sources['ycentroid'] >= 20) & (sources['ycentroid'] <= 1000))
sX, sY = sources['xcentroid'][in_bounds], sources['ycentroid'][in_bounds]
Xs2 = sX
Ys2 = sY

# Grab sources in the third image
threshold = thres*std3
Example no. 39
0
def compute(data):
    mean, median, std = sigma_clipped_stats(data, sigma=3.0, iters=5)
    sources = daofind(data - median, fwhm=3.0, threshold=5.*std)
    return sources
Example no. 40
0
def daofind_centroid(img, init=None, daofind_kwargs=None):
    """
    Find centroids using photutils.daofind.

    If init==None, all centroids found will be returned, along with 
    a flag indicating how many centroids were found. 

    If init is a (RA, Dec) pair of PIXEL coords, the closest centroid
    will be returned, along with a flag indicating how many centroids were 
    found. 

    Inputs
    ------
    img: array-like
        a 2-D image

    init: array-like, length 2 (optional)
        if provided, only return the source closest to the initial position

    daofind_kwargs: dict (optional)
        keyword arguments for photutils.daofind function
        default fwhm=2.5, threshold=1000

    Outputs
    -------
    coords: Table 
        xcentroid and ycentroid are the relevant columns

    num_sources: int
        number of sources found. Note that if init is supplied,
        num_sources may be >1 but only one row of coords is returned

    """

    # if no daofind arguments provided, use default fwhm and background
    if daofind_kwargs is None:
        daofind_kwargs = dict()
    daofind_kwargs["fwhm"] = daofind_kwargs.get("fwhm", 2.5)
    daofind_kwargs["threshold"] = daofind_kwargs.get("threshold", 1e3)

    # find sources
    sources = photutils.daofind(img, **daofind_kwargs)

    num_sources = len(sources)
    logging.debug("%d sources", num_sources)
    logging.debug(sources)

    # if an initial position is provided, only return the closest source
    if (init is not None) and (num_sources>1):
        ra, dec = init
        sep = np.sqrt((sources["xcentroid"] - ra)**2 + 
                      (sources["ycentroid"] - dec)**2)
        loc = np.argmin(sep)
        coords = sources[loc]

    elif (init is not None) and (num_sources==1):
        coords = sources[0]

    else:
        coords = sources
    
    return coords, num_sources
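
# A minimal usage sketch of daofind_centroid (not part of the original module);
# the FITS cutout name and the initial pixel position below are hypothetical.
from astropy.io import fits

img = fits.getdata('target_cutout.fits')  # hypothetical cutout around the target
coords, num_sources = daofind_centroid(img, init=(48.0, 52.0),
                                       daofind_kwargs={'fwhm': 2.5, 'threshold': 1000.})
print(num_sources, coords['xcentroid'], coords['ycentroid'])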
Example no. 41
0
# blue mosaic
b_nan = np.isnan(mosaic_b)
b_img_raw = mosaic_b
b_img_raw[b_nan] = 1
b_img = b_img_raw[175:1255, 125:1205]

sig_scal = 14
FWHM = 4
ap_r = 6.0
ap_v = 6.0
ap_b = 6.0

#background for red filter
sig_r = np.std(r_img[165:195, 330:360])  # no stars
source_r = daofind(r_img, fwhm=FWHM, threshold=sig_scal * sig_r)

#background for vis filter
sig_v = np.std(v_img[165:195, 330:360])  # no stars
source_v = daofind(v_img, fwhm=FWHM, threshold=sig_scal * sig_v)

#background for blue filter
sig_b = np.std(b_img[165:195, 330:360])  # no stars
source_b = daofind(b_img, fwhm=FWHM, threshold=sig_scal * sig_b)

# position of each star
r_position = zip(source_r['xcentroid'], source_r['ycentroid'])
v_position = zip(source_v['xcentroid'], source_v['ycentroid'])
b_position = zip(source_b['xcentroid'], source_b['ycentroid'])

# apertures around stars
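
# The excerpt is cut off here; a minimal sketch of the aperture step the comment above
# introduces, assuming the intent was circular apertures of radius ap_r/ap_v/ap_b at the
# detected positions (an assumption, not the author's original code):
from photutils import CircularAperture, aperture_photometry

r_apertures = CircularAperture(r_position, r=ap_r)
v_apertures = CircularAperture(v_position, r=ap_v)
b_apertures = CircularAperture(b_position, r=ap_b)

r_phot = aperture_photometry(r_img, r_apertures)  # summed counts in each red aperture
v_phot = aperture_photometry(v_img, v_apertures)
b_phot = aperture_photometry(b_img, b_apertures)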
Example no. 42
0
ground_truth = make_random_gaussians(num_sources, [min_flux, max_flux],
                                     [min_xmean, max_xmean],
                                     [min_xmean, max_xmean],
                                     [sigma_psf, sigma_psf],
                                     [sigma_psf, sigma_psf],
                                     random_state=123)
shape = (256, 256)
image = (make_gaussian_sources(shape, ground_truth) +
         make_noise_image(shape, type='poisson', mean=1., random_state=123))

ground_truth.write('input.html')
# estimate background as the median after sigma clipping the sources
_, bkg, std = sigma_clipped_stats(image, sigma=3.0, iters=5)

# find potential sources with daofind
sources = daofind(image - bkg, threshold=4.0*std,
                  fwhm=sigma_psf*gaussian_sigma_to_fwhm)
intab = Table(names=['id', 'x_0', 'y_0', 'flux_0'],
              data=[sources['id'], sources['xcentroid'],
                    sources['ycentroid'], sources['flux']])
intab.write('intab.html')

# group detections whose separations are below crit_separation so overlapping stars are handled together
groups = daogroup(intab, crit_separation=2.0*sigma_psf*gaussian_sigma_to_fwhm)

plt.subplot(1, 2, 1)
plt.imshow(image, origin='lower', interpolation='nearest')
plt.title('Simulated data')
plt.xlabel('x-position (pixel units)')
plt.ylabel('y-position (pixel units)')

plt.subplot(1, 2, 2)
for i in range(len(groups)):
Example no. 43
0
from photutils import daofind
from astropy.stats import sigma_clipped_stats
from photutils import find_peaks

DO_TEST = False

if (DO_TEST):
    
    num = 200 # Number of brightest objects to keep
    
    image_s = hbt.remove_sfit(image,4)
    
    mean, median, std = sigma_clipped_stats(image, sigma=3.0, iters=5)
        
    sources = daofind(image, fwhm=2.0, threshold=2.*std)
    
    threshold = median + (10.0 * std) # Ten sigma
    tbl = find_peaks(image, threshold, box_size=5)
    
    sources.sort('flux')  # Sort in-place
    tbl.sort('peak_value')
        
    if (num > 0):  
        index_start = -num
    else:
        index_start = 0
            
    x_phot = np.array(sources['xcentroid'][index_start:].data)
    y_phot = np.array(sources['ycentroid'][index_start:].data)
    
Example no. 44
0
    pickle.dump(cat, lun)
    lun.close()
    print("Wrote: " + file_stars_pkl)
        
#==============================================================================
# Do photometry on the NH image
#==============================================================================

# Use DAOphot to search the image for stars. It works really well.
# However, it returns position only -- no magnitudes.

#if (sequence == 'MVIC_D211'):
#    im = im[:,0:350]
    
mean, median, std = sigma_clipped_stats(im, sigma=3.0, iters=5)
sources = daofind(im - median, fwhm=4.0, threshold=2.*std)

x_phot = sources['xcentroid']
y_phot = sources['ycentroid']
flux   = sources['flux']

points_phot = np.transpose((y_phot, x_phot, flux)) # Create an array N x 2

# Sort them, bright to faint

order = (np.argsort(points_phot[:,2]))[::-1] # Sort from brightest to faintest
points_phot = points_phot[order]

#==============================================================================
# For each star in the catalog, go and see if there is a star we found with a center within, say, 2 pixels.
#==============================================================================
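
# The example stops here; a minimal sketch of that matching step. The catalog positions
# x_cat / y_cat are hypothetical stand-ins for the star catalog loaded earlier.
import numpy as np

x_cat = np.array([101.2, 250.7, 433.9])   # hypothetical catalog x positions (pixels)
y_cat = np.array([88.4, 199.1, 402.3])    # hypothetical catalog y positions (pixels)

for xc, yc in zip(x_cat, y_cat):
    # distance from this catalog star to every detected star (points_phot rows are [y, x, flux])
    dist = np.hypot(points_phot[:, 1] - xc, points_phot[:, 0] - yc)
    if dist.min() < 2.0:  # match within 2 pixels
        j = dist.argmin()
        print('catalog star (%.1f, %.1f) matched detection (%.1f, %.1f)' %
              (xc, yc, points_phot[j, 1], points_phot[j, 0]))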
Example no. 45
0
    dec1 = starCatalog['DEJ2000']
    catalog1 = SkyCoord(ra=ra1, dec=dec1, frame='fk5')

    # Read in the image and find the stars in it
    Ifile = (stokesDir + delim + '_'.join([thisTarget, thisWaveband, 'I']) +
             '.fits')
    stokesI = Image(Ifile)
    mean, median, std = sigma_clipped_stats(stokesI.arr, sigma=3.0, iters=5)
    threshold = median + 3.0 * std
    fwhm = 3.0
    sources = daofind(stokesI.arr,
                      threshold,
                      fwhm,
                      ratio=1.0,
                      theta=0.0,
                      sigma_radius=1.5,
                      sharplo=0.2,
                      sharphi=1.0,
                      roundlo=-1.0,
                      roundhi=1.0,
                      sky=0.0,
                      exclude_border=True)

    # Convert source positions to RA and Dec
    wcs = WCS(stokesI.header)
    ADstars = wcs.all_pix2world(sources['xcentroid'], sources['ycentroid'], 0)
    catalog2 = SkyCoord(ra=ADstars[0] * u.deg,
                        dec=ADstars[1] * u.deg,
                        frame='fk5')

    ###
    ### This slow, meat-axe method was useful for verification.
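
    # The excerpt stops here; for reference, a minimal sketch of the fast cross-match that
    # replaces the brute-force check, using astropy's built-in matcher on the two catalogs
    # built above (a sketch with a hypothetical 2-arcsecond tolerance, not the author's code):
    import astropy.units as u

    idx, sep2d, _ = catalog2.match_to_catalog_sky(catalog1)  # nearest catalog star per detection
    matched = sep2d < 2.0 * u.arcsec
    print('%d of %d detections matched' % (matched.sum(), len(catalog2)))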
Example no. 46
0
      where_are_NaNs = np.isnan(image)
      image[where_are_NaNs] = 0.
  
      header = hdu[0] 
      RA[i] = header.header['RA']
      DEC[i] = header.header['DEC']
      MJD[i] = header.header['MJD-OBS']
      wcs = pywcs.WCS(hdu[0].header)

      hdu_tbl  = pyfits.open(tables[i])
      header_tbl = hdu_tbl[0] 
      MJD_tbl[i] = header_tbl.header['MJD-OBS']

    
      # Raise the detection threshold until daofind returns at most 80 sources;
      # if that leaves too few (5 or fewer), fall back to lower fixed thresholds
      thr = 15.
      junk = daofind(image, fwhm=5.0, threshold=thr)
      len_junk = len(junk)
      while len_junk > 80:
          thr = thr + 5.
          junk = daofind(image, fwhm=5.0, threshold=thr)
          len_junk = len(junk)

      if len(junk) <= 5:
         junk = daofind(image, fwhm=5.0, threshold=10.)
      if len(junk) <= 5:
         junk = daofind(image, fwhm=5.0, threshold=5.)

      try:
          x=np.array(junk['xcentroid'])
          y=np.array(junk['ycentroid'])
          id_=np.array(junk['id'])
Example no. 47
0
  def extension(self, extension_idx, threshold='', FWHM=3.0, sigma=3.0, snr=50., plot=False):
    '''
    A method to run aperture photometry routines on an individual extension and save the results to the exposure class
    
    Parameters
    ----------
    extension_idx: int
      Index of the extension
    threshold: float (optional)
      The absolute image value above which to select sources
    FWHM: float
      The full width at half maximum
    sigma: float
      Number of standard deviations to use for background estimation
    snr: float
      The signal-to-noise ratio to use in the threshold detection
    plot: bool
      Plot the field with identified sources circled      

    Returns
    -------
    source_list: table
      A source list for the image

    '''

    # Define the data array
    data = self.hdulist[extension_idx].data.astype(np.float)
    
    # Extract the header and create a WCS object
    hdr = self.hdulist[extension_idx].header
    wcs = WCS(hdr)

    # Estimate the background and background noise
    mean, median, std = sigma_clipped_stats(data, sigma=sigma, iters=5)

    # Calculate the detection threshold if not provided
    if not threshold: threshold = np.mean(detect_threshold(data, snr=snr))
    
    # Print the parameters being used
    for p, v in zip(['mean', 'median', 'std', 'threshold', 'FWHM'],
                    [mean, median, std, threshold, FWHM]):
      print '{!s:10}: {:.3f}'.format(p, v)

    # Subtract background and generate sources list of all detections
    sources = daofind(data-median, threshold, FWHM)
    
    # Map RA and Dec to pixels
    positions = (sources['xcentroid'], sources['ycentroid'])
    skycoords = pixel_to_skycoord(*positions, wcs=wcs)
    
    # Calculate magnitudes at given source positions
    apertures = CircularAperture(positions, r=2.)
    photometry_table = aperture_photometry(data, apertures)
    
    # 'skycoords' ICRS object is problematic for stacking tables so for now we'll just add the ra and dec
    # photometry_table['sky_center'] = skycoords
    photometry_table['ra'], photometry_table['dec'] = skycoords.ra, skycoords.dec
    
    # Update data in the exposure object
    self.source_table = vstack([self.source_table,photometry_table], join_type='inner')  
    
    # Plot the sources
    if plot:
      norm = ImageNormalize(stretch=SqrtStretch())
      plt.imshow(data, cmap='Greys', origin='lower', norm=norm)
      apertures.plot(color='blue', lw=1.5, alpha=0.5)
    
    print '{!s:10}: {}'.format('sources',len(sources))
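
# A hypothetical call of the method above, assuming exp is an instance of the exposure
# class it belongs to (the class itself is not shown in this excerpt):
exp.extension(1, FWHM=3.0, sigma=3.0, snr=50., plot=True)
print(exp.source_table)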
Example no. 48
0
srcs = sep.extract(sim.bkg_sub_img, thresh=12*sim.bkg.globalrms)
posflux = srcs[['x','y', 'flux']]

psf_guess = psf.IntegratedGaussianPRF(flux=1, sigma=8)

psf_guess.flux.fixed = False
psf_guess.x_0.fixed = False
psf_guess.y_0.fixed = False
psf_guess.x_0.sigma = True

fitshape = (64,64)
intab = Table(names=['x_0', 'y_0', 'flux_0'], data=posflux)
#subimi = psf.subtract_psf(sim.bkg_sub_img, psf_guess, posflux)

outtabi = psf.psf_photometry(sim.bkg_sub_img, intab, psf_guess, fitshape,
    store_fit_info=True)
outtabi['flux_input'] = intab['flux_0']

# with daofind there are lots of garbage
found = daofind(sim.bkg_sub_img, threshold=5*sim.bkg.globalrms, fwhm=10,
    exclude_border=True)
intab2 = Table(names=['x_0', 'y_0', 'flux_0'], data=[found['xcentroid'],
    found['ycentroid'], found['flux']])
outtabi2 = psf.psf_photometry(sim.bkg_sub_img, intab2, psf_guess, fitshape,
    store_fit_info=True)
outtabi2['flux_input'] = intab2['flux_0']



Example no. 49
0
def daofind(imagename, ext=1, outname='default', threshold=3., fwhm=1.5, \
            ratio=1.0, theta=0.0, sigma_radius=1.5, sharplo=0.2, \
            sharphi=1.0, roundlo=-1.0, roundhi=1.0, sky=0.0, \
            exclude_border=True, use_bkgrd=True):
    """ Extends `photutils.daofind`, so that outputs a coordinate file
    with my formatting from `photutils_plus.phot_tools.write_out_photfile`.
    (Later, if I'm clever, use inheritance.)

    Parameters
    ----------
    imagename : string
        Name of the FITS file.
    ext : int
        The extension of the FITS file to read.
    outname : string
        Name of the output coordinate file. If "default", becomes,
        "<imagename>.coo."
    threshold : float
        Threshold for search. Default of "3."
    fwhm : float
        The full width half max. Default of "1.5."
    ratio : float
        Default of "1.0."
    theta : float 
        Default of "0.0."
    sigma_radius : float
        Default of "1.5."
    sharplo : float
        Default of "0.2."
    sharphi : float
        Default of "1.0."
    roundlo : float
        Default of "-1.0."
    roundhi : float
        Default of "1.0."
    sky : float
        Default of "0.0."
    exclude_border : {True, False}
        When True, masks out a 10-pixel border on image from search.
    use_bkgrd : {True, False}
        Setting on will multiply the background by the threshold to
        obtain a new threshold value. 

    Returns
    -------
    coo_tab : astropy.Table
        Table containing the coordinates.

    """
    # Fetch function metadata.
    current_params = locals()
    func_name = sys._getframe().f_code.co_name #daofind.__name__

    # Read in FITS file.
    hdulist = fits.open(imagename)
    data = hdulist[ext].data

    # Mask 10 pixels from border. I find the `exclude_border` in 
    # photutils.daofind inadequate.
    if exclude_border:
        data[:,0:10] = -99999.0
        data[:,-10:] = -99999.0
        data[0:10,:] = -99999.0
        data[-10:,:] = -99999.0

    # Get the background.
    if use_bkgrd:
        bkg_sigma = mad_std(data) 
        threshold = threshold * bkg_sigma 

    coo_tab = photutils.daofind(data=data,
                                threshold=threshold,
                                fwhm=fwhm,
                                ratio=ratio,
                                theta=theta,
                                sigma_radius=sigma_radius,
                                sharplo=sharplo,
                                sharphi=sharphi,
                                roundlo=roundlo,
                                roundhi=roundhi,
                                sky=sky,
                                exclude_border=exclude_border)

    # ??Insert background value into coo_tab??

    # Create basename for out file.
    if outname == 'default':
        baseoutname = imagename + '.coo'
    else:
        baseoutname = outname

    # Check whether file already exists. If yes, append number.
    fileoutname = baseoutname
    i=1
    while os.path.isfile(fileoutname):
        fileoutname = baseoutname
        fileoutname += ('.' + str(i))
        i += 1

    write_out_photfile(fileoutname, coo_tab, current_params, func_name)

    return coo_tab
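
# A hypothetical call of this wrapper; the image name and parameter values are
# illustrative only. It writes '<imagename>.coo' and returns the detection table.
coo_tab = daofind('example_flt.fits', ext=1, threshold=3., fwhm=1.5)
print(coo_tab)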
Example no. 50
0
for i in tqdm(range(len(stardata) - 2)):
    xx = stardata[i,:]
    plt.plot(pixel,xx,marker='.', c='r')
'''
'''
    Compute statistics on the image: the background noise, standard deviation, and mean photon
    counts are used to find all objects in the image that lie above some threshold and are near
    some FWHM value. Typing "sources" (without quotes) in the Spyder terminal prints all of the
    sources: the x, y coordinates of their locations in the image, the peak photon count, and
    the corresponding flux and magnitude values for each source.
    
'''

mean, median, std = sigma_clipped_stats(cutdata, sigma=5.0, iters=10)

sources = daofind(cutdata - median, fwhm=targetFWHM, threshold=2. * std)

#sources = sources[(sources['peak'] > 1000.)]

#positions = SC(ra=targetRAdeg * u.deg, dec = targetDECdeg * u.deg, frame='fk5')

positions = (sources['xcentroid'], sources['ycentroid'])

apertures = CircularAperture(positions, r=4.0)

norm = ImageNormalize(stretch=SqrtStretch())
'''Uncomment these plot commands to view the image. plt.imshow plots the whole image, and apertures.plot
   plots circles around all of the things in the image that have been determined to be sources.'''
#plt.imshow(cutdata, cmap='Spectral', origin='lower', norm=norm)
#apertures.plot(color='blue',lw=1.5, alpha=0.5)
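
# As the comment above suggests, a minimal sketch of the uncommented plotting step
# (the figure handling here is an assumption, not part of the original script):
import matplotlib.pyplot as plt

plt.imshow(cutdata, cmap='Spectral', origin='lower', norm=norm)
apertures.plot(color='blue', lw=1.5, alpha=0.5)
plt.colorbar()
plt.show()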