def sep_phot(data, ap, th):
    """ Performs photometry with SEP, similar to Source Extractor """
    # Measure a spatially variable background of some image data (np array)
    try:
        bkg = sep.Background(data)  # , mask=mask, bw=64, bh=64, fw=3, fh=3)  # optional parameters
    except ValueError:
        data = data.byteswap(True).newbyteorder()
        bkg = sep.Background(data)  # , mask=mask, bw=64, bh=64, fw=3, fh=3)  # optional parameters

    # Directly subtract the background from the data in place
    bkg.subfrom(data)

    # for the background-subtracted data, detect objects given some threshold
    thresh = th * bkg.globalrms  # ensure the threshold is high enough wrt background
    objs = sep.extract(data, thresh)

    # calculate the Kron radius for each object, then perform elliptical aperture photometry within that radius
    kronrad, krflag = sep.kron_radius(data, objs['x'], objs['y'], objs['a'], objs['b'], objs['theta'], ap)
    flux, fluxerr, flag = sep.sum_ellipse(data, objs['x'], objs['y'], objs['a'], objs['b'], objs['theta'],
                                          2.5 * kronrad, subpix=1)
    flag |= krflag  # combine flags into 'flag'

    # fall back to a fixed circular aperture when the Kron radius is too small
    r_min = 1.75  # minimum diameter = 3.5
    use_circle = kronrad * np.sqrt(objs['a'] * objs['b']) < r_min
    x = objs['x']
    y = objs['y']
    cflux, cfluxerr, cflag = sep.sum_circle(data, x[use_circle], y[use_circle], r_min, subpix=1)
    flux[use_circle] = cflux
    fluxerr[use_circle] = cfluxerr
    flag[use_circle] = cflag

    return objs
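# Added usage sketch (illustrative only): how sep_phot above might be called on a
# FITS frame. The file name and parameter values are hypothetical; `th` scales the
# global background RMS used as the detection threshold and `ap` is the integration
# radius passed to sep.kron_radius.
import numpy as np
import sep
from astropy.io import fits

image = fits.getdata('example.fits').astype(np.float64)  # hypothetical file
detections = sep_phot(image, ap=6.0, th=1.5)
print('detected %d objects' % len(detections))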
def sep_phot(exp_data, asteroid_id, ap=10.0):
    """ Measure the background of the postage stamp and the flux/flux_err of the asteroid """
    data2 = np.ones(exp_data.shape) * exp_data  # np.copyto(data2, exp_data)
    try:
        bkg = sep.Background(data2)
    except ValueError:
        # SEP needs native byte order; swap in place and rebuild the background
        data2 = data2.byteswap(True).newbyteorder()
        bkg = sep.Background(data2)

    # Directly subtract the background from the data in place
    bkg.subfrom(data2)

    # calculate the Kron radius for the object, then perform elliptical aperture photometry within that radius
    kronrad, krflag = sep.kron_radius(data2, asteroid_id[_XMID_HEADER], asteroid_id[_YMID_HEADER],
                                      asteroid_id[_A_HEADER], asteroid_id[_B_HEADER],
                                      asteroid_id[_THETA_HEADER], ap)
    flux, fluxerr, flag = sep.sum_ellipse(data2, asteroid_id[_XMID_HEADER], asteroid_id[_YMID_HEADER],
                                          asteroid_id[_A_HEADER], asteroid_id[_B_HEADER],
                                          asteroid_id[_THETA_HEADER], 2.5 * kronrad,
                                          subpix=1, err=bkg.globalrms)

    return bkg.globalback, flux, fluxerr
def AperaturePhoto(self, filter, objs):
    print('Running Aperture Photometry on %s......' % filter)
    kronrad, krflag = sep.kron_radius(self.dataList[filter], objs['x'], objs['y'],
                                      objs['a'], objs['b'], objs['theta'], 6.0)
    flux, fluxerr, flag = sep.sum_ellipse(self.dataList[filter], objs['x'], objs['y'],
                                          objs['a'], objs['b'], objs['theta'], 2.5*kronrad,
                                          subpix=1, err=self.bkgRMS[filter])

    # use circular aperture photometry if the Kron radius is too small,
    # see http://sep.readthedocs.org/en/v0.2.x/apertures.html
    r_min = 1.75  # minimum diameter = 3.5
    use_circle = kronrad * np.sqrt(objs['a']*objs['b']) < r_min
    cflux, cfluxerr, cflag = sep.sum_circle(self.dataList[filter], objs['x'][use_circle],
                                            objs['y'][use_circle], r_min,
                                            subpix=1, err=self.bkgRMS[filter])
    flux[use_circle] = cflux
    fluxerr[use_circle] = cfluxerr
    flag[use_circle] = cflag

    # convert flux to magnitudes using the appropriate zeropoint
    # absolute flux measurement (AB for Z-PEG)
    mag = -2.5*np.log10(flux) + self.zeroPoints[filter]

    # calculate magerr from the upper and lower flux bounds
    fluxdown = flux - fluxerr
    fluxup = flux + fluxerr
    magup = -2.5*np.log10(fluxdown) + self.zeroPoints[filter]
    magdown = -2.5*np.log10(fluxup) + self.zeroPoints[filter]
    magerr = ((magup - mag) + (mag - magdown))/2.

    return mag, magerr
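# Added note (illustration): the asymmetric magnitude error above, averaging the
# offsets from the upper and lower flux bounds, reduces to the familiar linearised
# form magerr ~= (2.5 / ln 10) * fluxerr / flux ~= 1.0857 * fluxerr / flux for small
# relative errors; the zeropoint cancels in the difference. Quick numerical check
# with made-up values:
import numpy as np

flux, fluxerr = 1000.0, 20.0
mag = -2.5 * np.log10(flux)
magup = -2.5 * np.log10(flux - fluxerr)
magdown = -2.5 * np.log10(flux + fluxerr)
print(((magup - mag) + (mag - magdown)) / 2.)   # ~0.0217
print(2.5 / np.log(10) * fluxerr / flux)        # ~0.0217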
def catalog(ccd, bgf, catf, db, config, logger): bg = fits.open(bgf) im = ccd.data - bg['background'].data ps = ccd.meta['SCALE'] * ccd.meta.get('REBIN', 1) bgrms = bg['background'].header['bgrms'] objects = sep.extract(im, 2, err=bgrms, mask=ccd.mask) logger.info('Found {} sources.'.format(len(objects))) rap = max(ccd.meta['SEEING'] * 2, 5 / ps) flux, fluxerr, flag = sep.sum_circle(im, objects['x'], objects['y'], rap, err=bgrms) # avoid theta rounding error theta = np.maximum(np.minimum(objects['theta'], np.pi / 2.00001), -np.pi / 2.00001) kronrad, krflag = sep.kron_radius(im, objects['x'], objects['y'], objects['a'], objects['b'], theta, 6.0) krflux, krfluxerr, _flag = sep.sum_ellipse(im, objects['x'], objects['y'], objects['a'], objects['b'], theta, 2.5 * kronrad, subpix=1, err=bgrms) krflag |= _flag # an additional background estimate, which should help when there are # large extended sources in scene: IN TESTS, THIS DID NOT AFFECT RESULTS # for i in range(len(objects)): # krflux[i], krfluxerr[i] = bg_subtract2(im, objects[i], krflux[i], # krfluxerr[i]) # flux[i], fluxerr[i] = bg_subtract2(im, objects[i], flux[i], # fluxerr[i], r=rap) if ccd.wcs.wcs.crval[0] == ccd.wcs.wcs.crval[1]: ra, dec = np.zeros((2, len(objects))) else: ra, dec = ccd.wcs.all_pix2world(objects['x'], objects['y'], 0) tab = Table( (objects['x'], objects['y'], ra, dec, flux, fluxerr, flag, objects['a'], objects['b'], theta, kronrad, krflux, krfluxerr, krflag), names=('x', 'y', 'ra', 'dec', 'flux', 'fluxerr', 'flag', 'a', 'b', 'theta', 'kronrad', 'krflux', 'krfluxerr', 'krflag')) hdu = fits.HDUList() hdu.append(fits.BinTableHDU(tab, name='cat')) hdu['cat'].header['RADIUS'] = (rap * ps, 'aperture photometry radius, arcsec') hdu.writeto(catf, overwrite=True)
def _measure(self, img, sources, mask=None):
    logger.info('measuring source parameters')

    # HACK: issues with numerical precision
    # must have -pi/2 <= theta <= pi/2
    sources['theta'][np.abs(np.abs(sources['theta']) - np.pi/2) < 1e-6] = np.pi/2
    for p in ['x', 'y', 'a', 'b', 'theta']:
        sources = sources[~np.isnan(sources[p])]

    # calculate "AUTO" parameters
    kronrad, krflag = sep.kron_radius(
        img, sources['x'], sources['y'], sources['a'],
        sources['b'], sources['theta'], 6.0, mask=mask)
    flux, fluxerr, flag = sep.sum_ellipse(
        img, sources['x'], sources['y'], sources['a'], sources['b'],
        sources['theta'], 2.5*kronrad, subpix=5, mask=mask)
    flag |= krflag  # combine flags into 'flag'

    sources = sources[~np.isnan(flux)]
    flux = flux[~np.isnan(flux)]
    sources = sources[flux > 0]
    flux = flux[flux > 0]

    mag_auto = utils.zpt - 2.5*np.log10(flux)
    r, flag = sep.flux_radius(
        img, sources['x'], sources['y'], 6.*sources['a'], 0.5,
        normflux=flux, subpix=5, mask=mask)

    sources['mag_auto'] = mag_auto
    sources['flux_auto'] = flux
    sources['flux_radius'] = r * utils.pixscale

    # approximate fwhm
    r_squared = sources['a']**2 + sources['b']**2
    sources['fwhm'] = 2 * np.sqrt(np.log(2) * r_squared) * utils.pixscale

    q = sources['b'] / sources['a']
    area = np.pi * q * sources['flux_radius']**2
    sources['mu_ave_auto'] = sources['mag_auto'] + 2.5 * np.log10(2*area)

    area_arcsec = np.pi * (self.psf_fwhm/2)**2 * utils.pixscale**2
    flux, fluxerr, flag = sep.sum_circle(
        img, sources['x'], sources['y'], self.psf_fwhm/2, subpix=5, mask=mask)
    flux[flux <= 0] = np.nan
    mu_0 = utils.zpt - 2.5*np.log10(flux / area_arcsec)
    sources['mu_0_aper'] = mu_0

    return sources
def sextractor(im,err=None,mask=None,nsig=5.0,gain=1.0): # Check byte order, SEP needs little endian if im.dtype.byteorder == '>': data = im.byteswap().newbyteorder() else: data = im # Background estimation and subtraction bkg = sep.Background(data, mask, bw=256, bh=256, fw=3, fh=3) bkg_image = bkg.back() data_sub = data-bkg #data_sub[data>50000]=0.0 # Detect and extract objects if err is None: objects = sep.extract(data_sub, nsig, err=bkg.globalrms, mask=mask) else: objects = sep.extract(data_sub, nsig, err=err, mask=mask) # Get mag_auto in 2 steps kronrad, krflag = sep.kron_radius(data_sub, objects['x'], objects['y'], objects['a'], objects['b'], objects['theta'], 6.0, mask=mask) flux, fluxerr, flag = sep.sum_ellipse(data_sub, objects['x'], objects['y'], objects['a'], objects['b'], objects['theta'], 2.5*kronrad, subpix=1, err=err, mask=mask, gain=gain) flag |= krflag # combine flags into 'flag' # Use circular aperture if Kron radius is too small r_min = 1.75 # minimum diameter = 3.5 use_circle = kronrad * np.sqrt(objects['a'] * objects['b']) < r_min if np.sum(use_circle)>0: cflux, cfluxerr, cflag = sep.sum_circle(data_sub, objects['x'][use_circle], objects['y'][use_circle], r_min, subpix=1, err=err, mask=mask, gain=gain) flux[use_circle] = cflux fluxerr[use_circle] = cfluxerr flag[use_circle] = cflag mag_auto = -2.5*np.log10(flux)+25.0 magerr_auto = 1.0857*fluxerr/flux # Make the final catalog newdt = np.dtype([('kronrad',float),('flux_auto',float),('fluxerr_auto',float),('mag_auto',float),('magerr_auto',float)]) cat = dln.addcatcols(objects,newdt) cat['flag'] |= flag cat['kronrad'] = kronrad cat['flux_auto'] = flux cat['fluxerr_auto'] = fluxerr cat['mag_auto'] = mag_auto cat['magerr_auto'] = magerr_auto return cat
def _get_flux_auto(self, objs): flux_auto = np.zeros(objs.size) - 9999.0 fluxerr_auto = np.zeros(objs.size) - 9999.0 flux_radius = np.zeros(objs.size) - 9999.0 kron_radius = np.zeros(objs.size) - 9999.0 w, = np.where((objs['a'] >= 0.0) & (objs['b'] >= 0.0) & (objs['theta'] >= -np.pi / 2.) & (objs['theta'] <= np.pi / 2.)) if w.size > 0: kron_radius[w], krflag = sep.kron_radius( self.image, objs['x'][w], objs['y'][w], objs['a'][w], objs['b'][w], objs['theta'][w], 6.0, ) objs['flag'][w] |= krflag aper_rad = 2.5 * kron_radius flux_auto[w], fluxerr_auto[w], flag_auto = \ sep.sum_ellipse( self.image, objs['x'][w], objs['y'][w], objs['a'][w], objs['b'][w], objs['theta'][w], aper_rad[w], subpix=1, ) objs['flag'][w] |= flag_auto flux_radius[w], frflag = sep.flux_radius( self.image, objs['x'][w], objs['y'][w], 6. * objs['a'][w], PHOT_FLUXFRAC, normflux=flux_auto[w], subpix=5, ) objs['flag'][w] |= frflag # combine flags into 'flag' return flux_auto, fluxerr_auto, flux_radius, kron_radius
def make_sep_catalog(data, header, options, mask=None, min_sep=10., do_bgsub=False): try: bkg = sep.Background(data, mask, bw=32, bh=32, fw=3, fh=3) except ValueError: data = data.byteswap().newbyteorder() bkg = sep.Background(data, mask, bw=32, bh=32, fw=3, fh=3) if do_bgsub: error = np.sqrt(data) data_bgsub = data - bkg else: error = bkg.globalrms data_bgsub = data sources = sep.extract(data_bgsub, err=error, mask=mask, **options['sep']) dists = ((sources['x'] - sources['x'][:, np.newaxis])**2 + (sources['y'] - sources['y'][:, np.newaxis])**2)**0.5 closest = np.partition(dists, 1)[:, 1] sources = sources[closest > min_sep] t = table.Table(sources) kronrad, krflag = sep.kron_radius(data_bgsub, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 6.0) flux, fluxerr, flag = sep.sum_ellipse(data_bgsub, sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * kronrad, subpix=1, err=error) t['mag'] = -2.5 * np.log10(flux) t['magerr'] = np.log(10) / 2.5 * fluxerr / flux t['ra'], t['dec'] = WCS(header).all_pix2world(t['x'], t['y'], 0) t = t['x', 'y', 'mag', 'magerr', 'ra', 'dec'] return t
def extract(data): bkg = sep.Background(data, bw=64, bh=64, fw=3, fh=3) bkg.subfrom(data) objs = sep.extract(data, 1.5*bkg.globalrms) flux, fluxerr, flag = sep.sum_circle(data, objs['x'], objs['y'], 5., err=bkg.globalrms) kr, flag = sep.kron_radius(data, objs['x'], objs['y'], objs['a'], objs['b'], objs['theta'], 6.0) eflux, efluxerr, eflag = sep.sum_ellipse(data, objs['x'], objs['y'], objs['a'], objs['b'], objs['theta'], r=2.5 * kr, err=bkg.globalrms, subpix=1) retstr = "" for i in range(len(objs['x'])): retstr = retstr+(str(objs['x'][i])+"\t"+str(objs['y'][i])+"\t"+str(flux[i])+"\t"+str(fluxerr[i])+"\t"+str(kr[i])+"\t"+str(eflux[i])+"\t"+str(efluxerr[i])+"\t"+str(flag[i])+"\n") return retstr
def findSpot(data, sigma): image=data #m, s = np.mean(image), np.std(image) bkg = sep.Background(image, bw=32, bh=32, fw=3, fh=3) objs = sep.extract(image-bkg, sigma, err=bkg.globalrms) aper_radius=3 # Calculate the Kron Radius kronrad, krflag = sep.kron_radius(image, objs['x'], objs['y'], \ objs['a'], objs['b'], objs['theta'], aper_radius) r_min = 3 use_circle = kronrad * np.sqrt(objs['a'] * objs['b']) cinx=np.where(use_circle <= r_min) einx=np.where(use_circle > r_min) # Calculate the equivalent of FLUX_AUTO flux, fluxerr, flag = sep.sum_ellipse(image, objs['x'][einx], objs['y'][einx], \ objs['a'][einx], objs['b'][einx], objs['theta'][einx], 2.5*kronrad[einx],subpix=1) cflux, cfluxerr, cflag = sep.sum_circle(image, objs['x'][cinx], objs['y'][cinx], objs['a'][cinx], subpix=1) # Adding half pixel to measured coordinate. objs['x'] = objs['x']+0.5 objs['y'] = objs['y']+0.5 objs['flux'][einx]=flux objs['flux'][cinx]=cflux r, flag = sep.flux_radius(image, objs['x'], objs['y'], \ 6*objs['a'], 0.3,normflux=objs['flux'], subpix=5) flag |= krflag objs=rfn.append_fields(objs, 'r', data=r, usemask=False) objects=objs[:] return objects
def kron_radius(components, observation=None, weight_order=0): """ Determine the Kron Radius Parameters ---------- components: a list of `scarlet.Component` or `scarlet.ComponentTree` Component to analyze observation """ if not isinstance(components, list): components = [components] # Determine the centroid, averaged through channels _, y_cen, x_cen = centroid(components, observation=observation) s = shape(components, observation, show_fig=False, weight_order=weight_order) q = s['q'] theta = np.deg2rad(s['pa']) blend = scarlet.Blend(components, observation) model = blend.get_model() mask = (observation.weights == 0) model = model * ~mask depth = model.shape[0] kron = [] if depth > 1: for i in range(depth): r_max = max(model.shape) r = sep.kron_radius(model[i], x_cen, y_cen, 1, 1 * q[i], theta[i], r_max)[0] kron.append(r) return np.array(kron)
def extract_obj(img, b=30, f=5, sigma=5, pixel_scale=0.168, minarea=5, deblend_nthresh=32, deblend_cont=0.005, clean_param=1.0, sky_subtract=False, show_fig=True, verbose=True, flux_auto=True, flux_aper=None): '''Extract objects for a given image, using `sep`. This is from `slug`. Parameters: ---------- img: 2-D numpy array b: float, size of box f: float, size of convolving kernel sigma: float, detection threshold pixel_scale: float Returns: ------- objects: astropy Table, containing the positions, shapes and other properties of extracted objects. segmap: 2-D numpy array, segmentation map ''' # Subtract a mean sky value to achieve better object detection b = 30 # Box size f = 5 # Filter width bkg = sep.Background(img, bw=b, bh=b, fw=f, fh=f) data_sub = img - bkg.back() sigma = sigma if sky_subtract: input_data = data_sub else: input_data = img objects, segmap = sep.extract(input_data, sigma, err=bkg.globalrms, segmentation_map=True, filter_type='matched', deblend_nthresh=deblend_nthresh, deblend_cont=deblend_cont, clean=True, clean_param=clean_param, minarea=minarea) if verbose: print("# Detect %d objects" % len(objects)) objects = Table(objects) objects.add_column(Column(data=np.arange(len(objects)) + 1, name='index')) # Maximum flux, defined as flux within six 'a' in radius. objects.add_column( Column(data=sep.sum_circle(input_data, objects['x'], objects['y'], 6. * objects['a'])[0], name='flux_max')) # Add FWHM estimated from 'a' and 'b'. # This is suggested here: https://github.com/kbarbary/sep/issues/34 objects.add_column( Column(data=2 * np.sqrt(np.log(2) * (objects['a']**2 + objects['b']**2)), name='fwhm_custom')) # Use Kron radius to calculate FLUX_AUTO in SourceExtractor. # Here PHOT_PARAMETER = 2.5, 3.5 if flux_auto: kronrad, krflag = sep.kron_radius(input_data, objects['x'], objects['y'], objects['a'], objects['b'], objects['theta'], 6.0) flux, fluxerr, flag = sep.sum_circle(input_data, objects['x'], objects['y'], 2.5 * (kronrad), subpix=1) flag |= krflag # combine flags into 'flag' r_min = 1.75 # minimum diameter = 3.5 use_circle = kronrad * np.sqrt(objects['a'] * objects['b']) < r_min cflux, cfluxerr, cflag = sep.sum_circle(input_data, objects['x'][use_circle], objects['y'][use_circle], r_min, subpix=1) flux[use_circle] = cflux fluxerr[use_circle] = cfluxerr flag[use_circle] = cflag objects.add_column(Column(data=flux, name='flux_auto')) objects.add_column(Column(data=kronrad, name='kron_rad')) if flux_aper is not None: objects.add_column( Column(data=sep.sum_circle(input_data, objects['x'], objects['y'], flux_aper[0])[0], name='flux_aper_1')) objects.add_column( Column(data=sep.sum_circle(input_data, objects['x'], objects['y'], flux_aper[1])[0], name='flux_aper_2')) objects.add_column( Column(data=sep.sum_circann(input_data, objects['x'], objects['y'], flux_aper[0], flux_aper[1])[0], name='flux_ann')) ''' objects.add_column(Column(data=sep.sum_circle(input_data, objects['x'], objects['y'], flux_aper[0] * objects['a'])[0], name='flux_aper_1')) objects.add_column(Column(data=sep.sum_circle(input_data, objects['x'], objects['y'], flux_aper[1] * objects['a'])[0], name='flux_aper_2')) objects.add_column(Column(data=sep.sum_circann(input_data, objects['x'], objects['y'], flux_aper[0] * objects['a'], flux_aper[1] * objects['a'])[0], name='flux_ann')) ''' # plot background-subtracted image if show_fig: fig, ax = plt.subplots(1, 2, figsize=(12, 6)) ax[0] = display_single(data_sub, ax=ax[0], scale_bar=False, pixel_scale=pixel_scale) from matplotlib.patches import Ellipse # plot an 
ellipse for each object for obj in objects: e = Ellipse(xy=(obj['x'], obj['y']), width=8 * obj['a'], height=8 * obj['b'], angle=obj['theta'] * 180. / np.pi) e.set_facecolor('none') e.set_edgecolor('red') ax[0].add_artist(e) ax[1] = display_single(segmap, scale='linear', cmap=SEG_CMAP, ax=ax[1]) return objects, segmap
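# Added usage sketch (illustrative only): calling extract_obj above on a synthetic
# frame. The image, threshold and source are made up; the call assumes the module's
# imports (sep, numpy, astropy Table/Column) are available, and show_fig=False avoids
# the plotting helpers.
import numpy as np

rng = np.random.default_rng(1)
fake = rng.normal(0.0, 1.0, (200, 200)).astype(np.float32)
fake[95:100, 95:100] += 50.0                      # one artificial source
objs, seg = extract_obj(fake, sigma=3, show_fig=False, verbose=True)
print(objs['x', 'y', 'flux_auto', 'kron_rad'])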
def do_stage(self, images): for i, image in enumerate(images): try: # Set the number of source pixels to be 5% of the total. This keeps us safe from # satellites and airplanes. sep.set_extract_pixstack(int(image.nx * image.ny * 0.05)) data = image.data.copy() error = (np.abs(data) + image.readnoise**2.0)**0.5 mask = image.bpm > 0 # Fits can be backwards byte order, so fix that if need be and subtract # the background try: bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) except ValueError: data = data.byteswap(True).newbyteorder() bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) bkg.subfrom(data) # Do an initial source detection # TODO: Add back in masking after we are sure SEP works sources = sep.extract(data, self.threshold, minarea=self.min_area, err=error, deblend_cont=0.005) # Convert the detections into a table sources = Table(sources) # Calculate the ellipticity sources['ellipticity'] = 1.0 - (sources['b'] / sources['a']) # Fix any value of theta that are invalid due to floating point rounding # -pi / 2 < theta < pi / 2 sources['theta'][sources['theta'] > (np.pi / 2.0)] -= np.pi sources['theta'][sources['theta'] < (-np.pi / 2.0)] += np.pi # Calculate the kron radius kronrad, krflag = sep.kron_radius(data, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 6.0) sources['flag'] |= krflag sources['kronrad'] = kronrad # Calcuate the equivilent of flux_auto flux, fluxerr, flag = sep.sum_ellipse(data, sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * kronrad, subpix=1, err=error) sources['flux'] = flux sources['fluxerr'] = fluxerr sources['flag'] |= flag # Calculate the FWHMs of the stars: fwhm = 2.0 * (np.log(2) * (sources['a']**2.0 + sources['b']**2.0))**0.5 sources['fwhm'] = fwhm # Cut individual bright pixels. 
Often cosmic rays sources = sources[fwhm > 1.0] # Measure the flux profile flux_radii, flag = sep.flux_radius(data, sources['x'], sources['y'], 6.0 * sources['a'], [0.25, 0.5, 0.75], normflux=sources['flux'], subpix=5) sources['flag'] |= flag sources['fluxrad25'] = flux_radii[:, 0] sources['fluxrad50'] = flux_radii[:, 1] sources['fluxrad75'] = flux_radii[:, 2] # Calculate the windowed positions sig = 2.0 / 2.35 * sources['fluxrad50'] xwin, ywin, flag = sep.winpos(data, sources['x'], sources['y'], sig) sources['flag'] |= flag sources['xwin'] = xwin sources['ywin'] = ywin # Calculate the average background at each source bkgflux, fluxerr, flag = sep.sum_ellipse(bkg.back(), sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * sources['kronrad'], subpix=1) #masksum, fluxerr, flag = sep.sum_ellipse(mask, sources['x'], sources['y'], # sources['a'], sources['b'], np.pi / 2.0, # 2.5 * kronrad, subpix=1) background_area = ( 2.5 * sources['kronrad'] )**2.0 * sources['a'] * sources['b'] * np.pi # - masksum sources['background'] = bkgflux sources['background'][background_area > 0] /= background_area[ background_area > 0] # Update the catalog to match fits convention instead of python array convention sources['x'] += 1.0 sources['y'] += 1.0 sources['xpeak'] += 1 sources['ypeak'] += 1 sources['xwin'] += 1.0 sources['ywin'] += 1.0 sources['theta'] = np.degrees(sources['theta']) image.catalog = sources['x', 'y', 'xwin', 'ywin', 'xpeak', 'ypeak', 'flux', 'fluxerr', 'background', 'fwhm', 'a', 'b', 'theta', 'kronrad', 'ellipticity', 'fluxrad25', 'fluxrad50', 'fluxrad75', 'x2', 'y2', 'xy', 'flag'] # Add the units and description to the catalogs image.catalog['x'].unit = 'pixel' image.catalog['x'].description = 'X coordinate of the object' image.catalog['y'].unit = 'pixel' image.catalog['y'].description = 'Y coordinate of the object' image.catalog['xwin'].unit = 'pixel' image.catalog[ 'xwin'].description = 'Windowed X coordinate of the object' image.catalog['ywin'].unit = 'pixel' image.catalog[ 'ywin'].description = 'Windowed Y coordinate of the object' image.catalog['xpeak'].unit = 'pixel' image.catalog['xpeak'].description = 'X coordinate of the peak' image.catalog['ypeak'].unit = 'pixel' image.catalog[ 'ypeak'].description = 'Windowed Y coordinate of the peak' image.catalog['flux'].unit = 'counts' image.catalog[ 'flux'].description = 'Flux within a Kron-like elliptical aperture' image.catalog['fluxerr'].unit = 'counts' image.catalog[ 'fluxerr'].description = 'Erronr on the flux within a Kron-like elliptical aperture' image.catalog['background'].unit = 'counts' image.catalog[ 'background'].description = 'Average background value in the aperture' image.catalog['fwhm'].unit = 'pixel' image.catalog['fwhm'].description = 'FWHM of the object' image.catalog['a'].unit = 'pixel' image.catalog[ 'a'].description = 'Semi-major axis of the object' image.catalog['b'].unit = 'pixel' image.catalog[ 'b'].description = 'Semi-minor axis of the object' image.catalog['theta'].unit = 'degrees' image.catalog[ 'theta'].description = 'Position angle of the object' image.catalog['kronrad'].unit = 'pixel' image.catalog[ 'kronrad'].description = 'Kron radius used for extraction' image.catalog['ellipticity'].description = 'Ellipticity' image.catalog['fluxrad25'].unit = 'pixel' image.catalog[ 'fluxrad25'].description = 'Radius containing 25% of the flux' image.catalog['fluxrad50'].unit = 'pixel' image.catalog[ 'fluxrad50'].description = 'Radius containing 50% of the flux' image.catalog['fluxrad75'].unit = 'pixel' 
image.catalog[ 'fluxrad75'].description = 'Radius containing 75% of the flux' image.catalog['x2'].unit = 'pixel^2' image.catalog[ 'x2'].description = 'Variance on X coordinate of the object' image.catalog['y2'].unit = 'pixel^2' image.catalog[ 'y2'].description = 'Variance on Y coordinate of the object' image.catalog['xy'].unit = 'pixel^2' image.catalog['xy'].description = 'XY covariance of the object' image.catalog[ 'flag'].description = 'Bit mask combination of extraction and photometry flags' image.catalog.sort('flux') image.catalog.reverse() logging_tags = logs.image_config_to_tags( image, self.group_by_keywords) logs.add_tag(logging_tags, 'filename', os.path.basename(image.filename)) # Save some background statistics in the header mean_background = stats.sigma_clipped_mean(bkg.back(), 5.0) image.header['L1MEAN'] = ( mean_background, '[counts] Sigma clipped mean of frame background') logs.add_tag(logging_tags, 'L1MEAN', float(mean_background)) median_background = np.median(bkg.back()) image.header['L1MEDIAN'] = ( median_background, '[counts] Median of frame background') logs.add_tag(logging_tags, 'L1MEDIAN', float(median_background)) std_background = stats.robust_standard_deviation(bkg.back()) image.header['L1SIGMA'] = ( std_background, '[counts] Robust std dev of frame background') logs.add_tag(logging_tags, 'L1SIGMA', float(std_background)) # Save some image statistics to the header good_objects = image.catalog['flag'] == 0 seeing = np.median( image.catalog['fwhm'][good_objects]) * image.pixel_scale image.header['L1FWHM'] = (seeing, '[arcsec] Frame FWHM in arcsec') logs.add_tag(logging_tags, 'L1FWHM', float(seeing)) mean_ellipticity = stats.sigma_clipped_mean( sources['ellipticity'][good_objects], 3.0) image.header['L1ELLIP'] = (mean_ellipticity, 'Mean image ellipticity (1-B/A)') logs.add_tag(logging_tags, 'L1ELLIP', float(mean_ellipticity)) mean_position_angle = stats.sigma_clipped_mean( sources['theta'][good_objects], 3.0) image.header['L1ELLIPA'] = ( mean_position_angle, '[deg] PA of mean image ellipticity') logs.add_tag(logging_tags, 'L1ELLIPA', float(mean_position_angle)) self.logger.info('Extracted sources', extra=logging_tags) except Exception as e: logging_tags = logs.image_config_to_tags( image, self.group_by_keywords) logs.add_tag(logging_tags, 'filename', os.path.basename(image.filename)) self.logger.error(e, extra=logging_tags) return images
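# Added sanity check (illustrative only): the FWHM proxy used above,
# 2*sqrt(ln(2)*(a**2 + b**2)), equals the true FWHM for a circular Gaussian whose
# second-moment semi-axes a and b both equal sigma (see the suggestion referenced
# at https://github.com/kbarbary/sep/issues/34). A quick check on a synthetic star:
import numpy as np
import sep

sigma_true = 2.0                                  # pixels
yy, xx = np.mgrid[0:64, 0:64]
star = 100.0 * np.exp(-((xx - 32.0)**2 + (yy - 32.0)**2) / (2.0 * sigma_true**2))
noisy = (star + np.random.normal(0.0, 0.01, star.shape)).astype(np.float32)

src = sep.extract(noisy, 5.0, err=0.01)[0]
fwhm_est = 2.0 * np.sqrt(np.log(2) * (src['a']**2 + src['b']**2))
print(2.355 * sigma_true, fwhm_est)               # both ~4.7, should agree to ~10%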
def detect_with_sep( event, detect_thresh=2., npixels=8, grow_seg=5, gauss_fwhm=2., gsize=3, im_wcs=None, ): """ Run SExtractor on a FITS file contained in the Lambda event This function will generate a catalog and a PNG for the FITS file stored in the Lambda event. The catalog and PNG will be stored in the s3 output bucket specified by the Lambda event. Parameters ---------- event : dict dict containing the data passed to the Lambda function detect_thresh: int, detection threshold to use for sextractor npixels: int, minimum number of pixels comprising an object grow_seg: int, gauss_fwhm: float, FWHM of the kernel to use for filtering prior to source finding gsize: float im_wcs: astropy.wcs.WCS WCS object defining the coordinate system of the observation Returns ------- """ drz_file = event['fits_s3_key'] drz_file_bucket = event['fits_s3_bucket'] fname = drz_file.split('/')[-1] s3 = boto3.resource('s3') bkt = s3.Bucket(drz_file_bucket) bkt.download_file(drz_file, f"/tmp/{fname}", ExtraArgs={"RequestPayer": "requester"}) im = fits.open(f"/tmp/{fname}") if im_wcs is None: im_wcs = wcs.WCS(im[1].header, relax=True) data = im[1].data.byteswap().newbyteorder() wht_data = im[2].data.byteswap().newbyteorder() data_mask = np.cast[data.dtype](data == 0) ## Get AB zeropoint try: photfnu = im[0].header['PHOTFNU'] except KeyError as e: LOG.warning(e) ZP = None else: ZP = -2.5 * np.log10(photfnu) + 8.90 try: photflam = im[0].header['PHOTFLAM'] except KeyError as e: LOG.warning(e) ZP = None else: ZP = -2.5*np.log10(photflam) - 21.10 - \ 5*np.log10(im[0].header['PHOTPLAM']) + 18.6921 if ZP is None: msg = ("Whoops! No zeropoint information found in primary header, " f"skipping file {fname}") LOG.warning(msg) # Scale fluxes to mico-Jy uJy_to_dn = 1 / (3631 * 1e6 * 10**(-0.4 * ZP)) # set up the error array err = 1 / np.sqrt(wht_data) err[~np.isfinite(err)] = 0 mask = (err == 0) # get the background bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) bkg_data = bkg.back() ratio = bkg.rms() / err err_scale = np.median(ratio[(~mask) & np.isfinite(ratio)]) err *= err_scale # Generate a kernel to use for filtering gaussian_kernel = kernels.Gaussian2DKernel( x_stddev=gauss_fwhm / gaussian_sigma_to_fwhm, y_stddev=gauss_fwhm / gaussian_sigma_to_fwhm, x_size=7, y_size=7) # Normalize the kernel gaussian_kernel.normalize() # Package the inputs for sextractor inputs = { 'err': err, 'mask': mask, 'filter_kernel': gaussian_kernel.array, 'filter_type': 'conv', 'minarea': npixels, 'deblend_nthresh': 32, 'deblend_cont': 0.005, 'clean': True, 'clean_param': 1, 'segmentation_map': False } objects = sep.extract(data - bkg_data, detect_thresh, **inputs) catalog = Table(objects) # add things to catalog autoparams = [2.5, 3.5] catalog['number'] = np.arange(len(catalog), dtype=np.int32) + 1 catalog['theta'] = np.clip(catalog['theta'], -np.pi / 2, np.pi / 2) # filter out any NaNs for c in ['a', 'b', 'x', 'y', 'theta']: catalog = catalog[np.isfinite(catalog[c])] catalog['ra'], catalog['dec'] = im_wcs.all_pix2world( catalog['x'], catalog['y'], 1) catalog['ra'].unit = u.deg catalog['dec'].unit = u.deg catalog['x_world'], catalog['y_world'] = catalog['ra'], catalog['dec'] kronrad, krflag = sep.kron_radius(data - bkg_data, catalog['x'], catalog['y'], catalog['a'], catalog['b'], catalog['theta'], 6.0) kronrad *= autoparams[0] kronrad[~np.isfinite(kronrad)] = autoparams[1] kronrad = np.maximum(kronrad, autoparams[1]) kron_out = sep.sum_ellipse(data - bkg_data, catalog['x'], catalog['y'], catalog['a'], catalog['b'], 
catalog['theta'], kronrad, subpix=5, err=err) kron_flux, kron_fluxerr, kron_flag = kron_out kron_flux_flag = kron_flag catalog['mag_auto_raw'] = ZP - 2.5 * np.log10(kron_flux) catalog['magerr_auto_raw'] = 2.5 / np.log(10) * kron_fluxerr / kron_flux catalog['mag_auto'] = catalog['mag_auto_raw'] * 1. catalog['magerr_auto'] = catalog['magerr_auto_raw'] * 1. catalog['kron_radius'] = kronrad * u.pixel catalog['kron_flag'] = krflag catalog['kron_flux_flag'] = kron_flux_flag # Make a plot im_data = im[1].data im_shape = im_data.shape im_data[np.isnan(im_data)] = 0.0 # Trim the top and bottom 1 percent of pixel values top = np.percentile(im_data, 99) im_data[im_data > top] = top bottom = np.percentile(im_data, 1) im_data[im_data < bottom] = bottom # Scale the data. im_data = im_data - im_data.min() im_data = (im_data / im_data.max()) * 255. im_data = np.uint8(im_data) f, (ax) = plt.subplots(1, 1, sharex=True) f.set_figheight(12) f.set_figwidth(12) ax.imshow(im_data, cmap="Greys", clim=(0, 255), origin='lower') ax.plot(catalog['x'], catalog['y'], 'o', markeredgewidth=1, markeredgecolor='red', markerfacecolor='None') ax.set_xlim([-0.05 * im_shape[1], 1.05 * im_shape[1]]) ax.set_ylim([-0.05 * im_shape[0], 1.05 * im_shape[0]]) basename = fname.split('_')[0] f.savefig(f"/tmp/{basename}.png") # Write the catalog to local disk catalog.write(f"/tmp/{basename}.catalog.fits", format='fits') # Write out to S3 s3 = boto3.resource('s3') s3.meta.client.upload_file(f"/tmp/{basename}.catalog.fits", event['s3_output_bucket'], f"{basename}/{basename}.catalog.fits") s3.meta.client.upload_file(f"/tmp/{basename}.png", event['s3_output_bucket'], f"{basename}/{basename}.png")
def sourceExtract(data, thresh=3, bkg=False, bkg_rms=None, err=None, mask=None,
                  min_area=5, deblend_cont=0.05, segment=False, extras=False):
    """
    Extract all sources above a certain threshold in the given image

    Parameters
    ----------
    data : array-like
        CCD image frame from which to extract sources
    thresh : float, optional
        Number of sigma a detection must be above the background to be
        flagged as a source - if err not given, bkg_rms is needed
        Default = 3
    bkg : bool, optional
        Toggle to model spatially varying background and subtract from data -
        by default assumes this has been done separately
        Default = False
    bkg_rms : float, optional
        Estimation of the global background noise - used to determine the
        threshold if err is None - can calculate global background rms when
        subtracting background model
        Default = None
    err : array-like, optional
        Error array for the CCD frame - supersedes bkg_rms when determining
        the threshold
        Default = None
    min_area : int, optional
        Minimum number of pixels to be flagged as a source
        Default = 5
    deblend_cont : float, optional
        Minimum contrast ratio used by SEP for deblending
        Default = 0.05
    segment : bool, optional
        Toggle to generate a segmentation map for the given image
        Default = False
    extras : bool, optional
        Toggle to calculate ellipticity, FWHM, Kron radius and flux radius
        Default = False

    Returns
    -------
    sources : astropy Table object
        Table containing quantities determined by sep for each source
        detected in the given image
    segmentation_map : array-like, optional
        Array of integers with same shape as data - pixels not belonging to
        any object have value 0, whilst all pixels belonging to the ith
        object (e.g. sources[i]) have value i+1 - only returned if segment
        is True

    Raises
    ------
    None
    """
    # subtract spatially varying background model if requested
    if bkg:
        data, bkg_rms = subtractBackground(data)

    # determine threshold for extraction
    if err is None:
        thresh *= bkg_rms

    # extract sources
    if not segment:
        sources = sep.extract(data, thresh, err=err, mask=mask,
                              minarea=min_area, deblend_cont=deblend_cont)
    else:
        sources, seg_map = sep.extract(data, thresh, err=err, mask=mask,
                                       minarea=min_area, deblend_cont=deblend_cont,
                                       segmentation_map=True)
    sources = Table(sources)

    # remove nans from table
    sources = pruneNansFromTable(sources)

    if extras:
        # calculate ellipticity parameter
        sources['ellipticity'] = 1.0 - (sources['b'] / sources['a'])
        # calculate full width half maxima
        sources['fwhm'] = calculateFWHM(sources['a'], sources['b'])
        # compute kron radii
        try:
            sources['kronr'], krflag = sep.kron_radius(data, sources['x'], sources['y'],
                                                       sources['a'], sources['b'],
                                                       sources['theta'], 6.0)
            sources['flag'] |= krflag
        except Exception as e:
            print(e)
        # compute flux radii
        try:
            sources['fluxr'], frflag = sep.flux_radius(data, sources['x'], sources['y'],
                                                       6.0 * sources['a'], 0.5, subpix=5)
            sources['flag'] |= frflag
        except Exception as e:
            print(e)

    if segment:
        return sources, seg_map
    else:
        return sources
def aperphot(image,objects,aper=[3],gain=None,mag_zeropoint=25.0): """ Aperture photometry using sep. Parameters ---------- im : CCDData object The image to estimate the background for. objects : table Table of objects with x/y coordinate. aper : float, optional Radius of the aperture. Default is 3.0 pixels. gain : float, optional The gain. Default is 1. mag_zeropoint : float The magnitude zero-point to use. Default is 25. Returns ------- phot : astropy table Catalog of measured aperture photometry and other SE parameters. Example ------- phot = aperphot(im,objects) """ if isinstance(image,CCDData) is False: raise ValueError("Image must be a CCDData object") # Get C-continuous data data,error,mask,sky = image.ccont data_sub = data-sky # Get gain from image if possible gain = image.gain # Initialize the output catalog outcat = objects.copy() # Circular aperture photometry for i,ap in enumerate(aper): apflux, apfluxerr, apflag = sep.sum_circle(data_sub, outcat['x'], outcat['y'], ap, err=error, mask=mask, gain=gain) # Add to the catalog outcat['flux_aper'+str(i+1)] = apflux outcat['fluxerr_aper'+str(i+1)] = apfluxerr outcat['mag_aper'+str(i+1)] = -2.5*np.log10(apflux)+mag_zeropoint outcat['magerr_aper'+str(i+1)] = (2.5/np.log(10))*(apfluxerr/apflux) outcat['flag_aper'+str(i+1)] = apflag # Make sure theta's are between -pi/2 and +pi/2 radians if 'theta' in objects.columns: theta = objects['theta'].copy() hi = theta>0.5*np.pi if np.sum(hi)>0: theta[hi] -= np.pi lo = theta<-0.5*np.pi if np.sum(lo)>0: theta[lo] += np.pi else: theta = np.zeros(len(outcat),float) # We have morphology parameters if 'a' in outcat.columns and 'b' in outcat.columns: kronrad, krflag = sep.kron_radius(data_sub, outcat['x'], outcat['y'], outcat['a'], outcat['b'], theta, 6.0, mask=mask) else: kronrad, krflag = None, None # Add more columns outcat['flux_auto'] = 0.0 outcat['fluxerr_auto'] = 0.0 outcat['mag_auto'] = 0.0 outcat['magerr_auto'] = 0.0 outcat['kronrad'] = kronrad outcat['flag_auto'] = np.int16(0) # BACKGROUND ANNULUS??? 
# FLUX_AUTO # Only use elliptical aperture if Kron radius is large enough # Use circular aperture photometry if the Kron radius is too small r_min = 1.75 # minimum diameter = 3.5 if kronrad is not None: use_circle = kronrad * np.sqrt(outcat['a'] * outcat['b']) < r_min else: use_circle = np.ones(len(outcat),bool) nuse_ellipse = np.sum(~use_circle) nuse_circle = np.sum(use_circle) # Elliptical aperture if nuse_ellipse>0: flux, fluxerr, flag = sep.sum_ellipse(data=data_sub, x=outcat['x'][~use_circle], y=outcat['y'][~use_circle], a=outcat['a'][~use_circle],b=outcat['b'][~use_circle], theta=outcat['theta'][~use_circle], r=2.5*kronrad[~use_circle], subpix=1, err=error, mask=mask) flag |= krflag[~use_circle] # combine flags into 'flag' outcat['flux_auto'][~use_circle] = flux outcat['fluxerr_auto'][~use_circle] = fluxerr outcat['mag_auto'][~use_circle] = -2.5*np.log10(flux)+mag_zeropoint outcat['magerr_auto'][~use_circle] = (2.5/np.log(10))*(fluxerr/flux) outcat['flag_auto'][~use_circle] = flag # Use circular aperture photometry if the Kron radius is too small if nuse_circle>0: cflux, cfluxerr, cflag = sep.sum_circle(data_sub, outcat['x'][use_circle], outcat['y'][use_circle], r_min, subpix=1, err=error, mask=mask) outcat['flux_auto'][use_circle] = cflux outcat['fluxerr_auto'][use_circle] = cfluxerr outcat['mag_auto'][use_circle] = -2.5*np.log10(cflux)+mag_zeropoint outcat['magerr_auto'][use_circle] = (2.5/np.log(10))*(cfluxerr/cflux) outcat['flag_auto'][use_circle] = cflag outcat['kronrad'][use_circle] = r_min # Add S/N outcat['snr'] = 1.087/outcat['magerr_auto'] return outcat
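# Added note (illustrative numbers): the fallback above follows the FLUX_AUTO recipe
# in the sep documentation - when the circularised Kron aperture kronrad*sqrt(a*b)
# falls below r_min = 1.75 px, a fixed circular aperture of radius 1.75 px (minimum
# diameter 3.5 px) is used instead.
import numpy as np

kronrad, a, b = 0.8, 1.6, 1.2                     # hypothetical faint source
print(kronrad * np.sqrt(a * b))                   # ~1.11 px
print(kronrad * np.sqrt(a * b) < 1.75)            # True -> use the circular aperture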
def test_vs_sextractor(): """Test behavior of sep versus sextractor. Note: we turn deblending off for this test. This is because the deblending algorithm uses a random number generator. Since the sequence of random numbers is not the same between sextractor and sep or between different platforms, object member pixels (and even the number of objects) can differ when deblending is on. Deblending is turned off by setting DEBLEND_MINCONT=1.0 in the sextractor configuration file and by setting deblend_cont=1.0 in sep.extract(). """ data = np.copy(image_data) # make an explicit copy so we can 'subfrom' bkg = sep.Background(data, bw=64, bh=64, fw=3, fh=3) # Test that SExtractor background is same as SEP: bkgarr = bkg.back(dtype=np.float32) assert_allclose(bkgarr, image_refback, rtol=1.e-5) # Extract objects (use deblend_cont=1.0 to disable deblending). bkg.subfrom(data) objs = sep.extract(data, 1.5*bkg.globalrms, deblend_cont=1.0) objs = np.sort(objs, order=['y']) # Read SExtractor result refobjs = np.loadtxt(IMAGECAT_FNAME, dtype=IMAGECAT_DTYPE) refobjs = np.sort(refobjs, order=['y']) # Found correct number of sources at the right locations? assert_allclose(objs['x'], refobjs['x'] - 1., atol=1.e-3) assert_allclose(objs['y'], refobjs['y'] - 1., atol=1.e-3) # Test aperture flux flux, fluxerr, flag = sep.sum_circle(data, objs['x'], objs['y'], 5., err=bkg.globalrms) assert_allclose(flux, refobjs['flux_aper'], rtol=2.e-4) assert_allclose(fluxerr, refobjs['fluxerr_aper'], rtol=1.0e-5) # check if the flags work at all (comparison values assert ((flag & sep.APER_TRUNC) != 0).sum() == 4 assert ((flag & sep.APER_HASMASKED) != 0).sum() == 0 # Test "flux_auto" kr, flag = sep.kron_radius(data, objs['x'], objs['y'], objs['a'], objs['b'], objs['theta'], 6.0) flux, fluxerr, flag = sep.sum_ellipse(data, objs['x'], objs['y'], objs['a'], objs['b'], objs['theta'], r=2.5 * kr, err=bkg.globalrms, subpix=1) # For some reason, one object doesn't match. It's very small # and kron_radius is set to 0.0 in SExtractor, but 0.08 in sep. # Could be due to a change in SExtractor between v2.8.6 (used to # generate "truth" catalog) and v2.18.11 (from which sep was forked). i = 56 # index is 59 when deblending is on. kr[i] = 0.0 flux[i] = 0.0 fluxerr[i] = 0.0 # We use atol for radius because it is reported to nearest 0.01 in # reference objects. assert_allclose(2.5*kr, refobjs['kron_radius'], atol=0.01, rtol=0.) assert_allclose(flux, refobjs['flux_auto'], rtol=0.0005) assert_allclose(fluxerr, refobjs['fluxerr_auto'], rtol=0.0005) # Test ellipse representation conversion cxx, cyy, cxy = sep.ellipse_coeffs(objs['a'], objs['b'], objs['theta']) assert_allclose(cxx, objs['cxx'], rtol=1.e-4) assert_allclose(cyy, objs['cyy'], rtol=1.e-4) assert_allclose(cxy, objs['cxy'], rtol=1.e-4) a, b, theta = sep.ellipse_axes(objs['cxx'], objs['cyy'], objs['cxy']) assert_allclose(a, objs['a'], rtol=1.e-4) assert_allclose(b, objs['b'], rtol=1.e-4) assert_allclose(theta, objs['theta'], rtol=1.e-4) #test round trip cxx, cyy, cxy = sep.ellipse_coeffs(a, b, theta) assert_allclose(cxx, objs['cxx'], rtol=1.e-4) assert_allclose(cyy, objs['cyy'], rtol=1.e-4) assert_allclose(cxy, objs['cxy'], rtol=1.e-4) # test flux_radius fr, flags = sep.flux_radius(data, objs['x'], objs['y'], 6.*refobjs['a'], [0.1, 0.5, 0.6], normflux=refobjs['flux_auto'], subpix=5) assert_allclose(fr, refobjs["flux_radius"], rtol=0.04, atol=0.01) # test winpos sig = 2. 
/ 2.35 * fr[:, 1] # flux_radius = 0.5 xwin, ywin, flag = sep.winpos(data, objs['x'], objs['y'], sig) assert_allclose(xwin, refobjs["xwin"] - 1., rtol=0., atol=0.0025) assert_allclose(ywin, refobjs["ywin"] - 1., rtol=0., atol=0.0025)
async def __call__(self, image: Image) -> Image: """Find stars in given image and append catalog. Args: image: Image to find stars in. Returns: Image with attached catalog. """ import sep loop = asyncio.get_running_loop() # got data? if image.data is None: log.warning("No data found in image.") return image # no mask? mask = image.mask if image.mask is not None else np.zeros( image.data.shape, dtype=bool) # remove background data, bkg = SepSourceDetection.remove_background(image.data, mask) # extract sources sources = await loop.run_in_executor( None, partial( sep.extract, data, self.threshold, err=bkg.globalrms, minarea=self.minarea, deblend_nthresh=self.deblend_nthresh, deblend_cont=self.deblend_cont, clean=self.clean, clean_param=self.clean_param, mask=image.mask, ), ) # convert to astropy table sources = pd.DataFrame(sources) # only keep sources with detection flag < 8 sources = sources[sources["flag"] < 8] x, y = sources["x"], sources["y"] # Calculate the ellipticity sources["ellipticity"] = 1.0 - (sources["b"] / sources["a"]) # calculate the FWHMs of the stars fwhm = 2.0 * (np.log(2) * (sources["a"]**2.0 + sources["b"]**2.0))**0.5 sources["fwhm"] = fwhm # clip theta to [-pi/2,pi/2] sources["theta"] = sources["theta"].clip(lower=np.pi / 2, upper=np.pi / 2) # Kron radius kronrad, krflag = sep.kron_radius(data, x, y, sources["a"], sources["b"], sources["theta"], 6.0) sources["flag"] |= krflag sources["kronrad"] = kronrad # equivalent of FLUX_AUTO gain = image.header["DET-GAIN"] if "DET-GAIN" in image.header else None flux, fluxerr, flag = await loop.run_in_executor( None, partial( sep.sum_ellipse, data, x, y, sources["a"], sources["b"], sources["theta"], 2.5 * kronrad, subpix=5, mask=image.mask, gain=gain, ), ) sources["flag"] |= flag sources["flux"] = flux # radii at 0.25, 0.5, and 0.75 flux flux_radii, flag = sep.flux_radius(data, x, y, 6.0 * sources["a"], [0.25, 0.5, 0.75], normflux=sources["flux"], subpix=5) sources["flag"] |= flag sources["fluxrad25"] = flux_radii[:, 0] sources["fluxrad50"] = flux_radii[:, 1] sources["fluxrad75"] = flux_radii[:, 2] # xwin/ywin sig = 2.0 / 2.35 * sources["fluxrad50"] xwin, ywin, flag = sep.winpos(data, x, y, sig) sources["flag"] |= flag sources["xwin"] = xwin sources["ywin"] = ywin # theta in degrees sources["theta"] = np.degrees(sources["theta"]) # only keep sources with detection flag < 8 sources = sources[sources["flag"] < 8] # match fits conventions sources["x"] += 1 sources["y"] += 1 # pick columns for catalog cat = sources[[ "x", "y", "peak", "flux", "fwhm", "a", "b", "theta", "ellipticity", "tnpix", "kronrad", "fluxrad25", "fluxrad50", "fluxrad75", "xwin", "ywin", ]] # copy image, set catalog and return it img = image.copy() img.catalog = Table.from_pandas(cat) return img
def test_masked_segmentation_measurements(): """Test measurements with segmentation masking""" NX = 100 data = np.zeros((NX*2,NX*2)) yp, xp = np.indices(data.shape) #### # Make two 2D gaussians that slightly overlap # width of the 2D objects gsigma = 10. # offset between two gaussians in sigmas off = 4 for xy in [[NX,NX], [NX+off*gsigma, NX+off*gsigma]]: R = np.sqrt((xp-xy[0])**2+(yp-xy[1])**2) g_i = np.exp(-R**2/2/gsigma**2) data += g_i # Absolute total total_exact = g_i.sum() # Add some noise rms = 0.02 np.random.seed(1) data += np.random.normal(size=data.shape)*rms # Run source detection objs, segmap = sep.extract(data, thresh=1.2, err=rms, mask=None, segmentation_map=True) seg_id = np.arange(1, len(objs)+1, dtype=np.int32) # Compute Kron/Auto parameters x, y, a, b = objs['x'], objs['y'], objs['a'], objs['b'] theta = objs['theta'] kronrad, krflag = sep.kron_radius(data, x, y, a, b, theta, 6.0) flux_auto, fluxerr, flag = sep.sum_ellipse(data, x, y, a, b, theta, 2.5*kronrad, segmap=segmap, seg_id=seg_id, subpix=1) # Test total flux assert_allclose(flux_auto, total_exact, rtol=5.e-2) # Flux_radius for flux_fraction in [0.2, 0.5]: # Exact solution rhalf_exact = np.sqrt(-np.log(1-flux_fraction)*gsigma**2*2) # Masked measurement flux_radius, flag = sep.flux_radius(data, x, y, 6.*a, flux_fraction, seg_id=seg_id, segmap=segmap, normflux=flux_auto, subpix=5) # Test flux fraction assert_allclose(flux_radius, rhalf_exact, rtol=5.e-2) if False: print('test_masked_flux_radius') print(total_exact, flux_auto) print(rhalf_exact, flux_radius)
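# Added note: the "exact" radius used above comes from the enclosed flux of a
# circular 2D Gaussian, F(<r)/F_tot = 1 - exp(-r**2 / (2*sigma**2)), so the radius
# enclosing a fraction f is r = sigma*sqrt(-2*ln(1 - f)). Quick check for the
# half-light radius with gsigma = 10 as in the test:
import numpy as np

gsigma, f = 10.0, 0.5
print(gsigma * np.sqrt(-2.0 * np.log(1.0 - f)))   # ~11.77 px, i.e. 1.1774*sigma (HWHM)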
def sourcephot(catalogue,image,segmap,detection,instrument='MUSE',dxp=0.,dyp=0., noise=[False],zpab=False, kn=2.5, circap=1.0): """ Get a source catalogue from findsources and a fits image with ZP and compute magnitudes in that filter catalogue -> source cat from findsources image -> fits image with ZP in header segmap -> fits of segmentation map detection -> the detection image, used to compute Kron radius instrument -> if not MUSE, map positions from detection to image dxp,dyp -> shifts in pixel of image to register MUSE and image astrometry noise -> if set to a noise model, use equation noise[0]*noise[1]*npix**noise[2] to compute the error zpab -> if ZPAB (zeropoint AB) not stored in header, must be supplied kn -> factor to be used when scaling Kron apertures [sextractor default 2.5] circap -> radius in arcsec for aperture photmetry to be used when Kron aperture fails """ from astropy.io import fits import numpy as np import sep import matplotlib.pyplot as plt from astropy.table import Table from astropy import wcs #grab root name rname=((image.split('/')[-1]).split('.fits'))[0] print ('Working on {}'.format(rname)) #open the catalogue/fits cat=fits.open(catalogue) img=fits.open(image) seg=fits.open(segmap) det=fits.open(detection) #grab reference wcs from detection image wref=wcs.WCS(det[0].header) psref=wref.pixel_scale_matrix[1,1]*3600. print ('Reference pixel size {}'.format(psref)) #if not handling MUSE, special cases for format of data if('MUSE' not in instrument): #handle instrument cases if('LRIS' in instrument): #data imgdata=img[1].data #place holder for varaince as will use noise model below vardata=imgdata*0+1 vardata=vardata.byteswap(True).newbyteorder() #grab wcs image wimg=wcs.WCS(img[1].header) psimg=wimg.pixel_scale_matrix[1,1]*3600. #store the ZP if(zpab): img[0].header['ZPAB']=zpab else: print 'Instrument not supported!!' exit() else: #for muse, keep eveything the same imgdata=img[0].data vardata=img[1].data psimg=psref #grab flux and var dataflx=np.nan_to_num(imgdata.byteswap(True).newbyteorder()) datavar=np.nan_to_num(vardata.byteswap(True).newbyteorder()) #grab detection and seg mask detflx=np.nan_to_num(det[0].data.byteswap(True).newbyteorder()) #go back to 1d segmask=(np.nan_to_num(seg[0].data.byteswap(True).newbyteorder()))[0,:,:] #if needed, map the segmap to new image with transformation if('MUSE' not in instrument): #allocate space for transformed segmentation map segmasktrans=np.zeros(dataflx.shape) print "Remapping segmentation map to new image..." 
#loop over original segmap and map to trasformed one #Just use nearest pixel, and keep only 1 when multiple choices for xx in range(segmask.shape[0]): for yy in range(segmask.shape[1]): #go to world radec=wref.wcs_pix2world([[yy,xx]],0) #back to new instrument pixel newxy=wimg.wcs_world2pix(radec,0) #apply shift to register WCS newxy[0][1]=newxy[0][1]+dyp newxy[0][0]=newxy[0][0]+dxp segmasktrans[newxy[0][1],newxy[0][0]]=segmask[xx,yy] #grow buffer as needed by individual instruments #This accounts for resampling to finer pixel size if('LRIS' in instrument): segmasktrans[newxy[0][1]+1,newxy[0][0]+1]=segmask[xx,yy] segmasktrans[newxy[0][1]-1,newxy[0][0]-1]=segmask[xx,yy] segmasktrans[newxy[0][1]+1,newxy[0][0]-1]=segmask[xx,yy] segmasktrans[newxy[0][1]-1,newxy[0][0]+1]=segmask[xx,yy] segmasktrans[newxy[0][1]+1,newxy[0][0]]=segmask[xx,yy] segmasktrans[newxy[0][1]-1,newxy[0][0]]=segmask[xx,yy] segmasktrans[newxy[0][1],newxy[0][0]-1]=segmask[xx,yy] segmasktrans[newxy[0][1],newxy[0][0]+1]=segmask[xx,yy] #dump the transformed segmap for checking hdumain = fits.PrimaryHDU(segmasktrans,header=img[1].header) hdulist = fits.HDUList(hdumain) hdulist.writeto("{}_segremap.fits".format(rname),clobber=True) else: #no transformation needed segmasktrans=segmask #source to extract nsrc=len(cat[1].data) print('Extract photometry for {} sources'.format(nsrc)) phot = Table(names=('ID', 'MAGAP', 'MAGAP_ERR','FXAP', 'FXAP_ERR', 'RAD', 'MAGSEG', 'MAGSEG_ERR', 'FXSEG', 'FXSEG_ERR','ZP'), dtype=('i4','f4','f4','f4','f4','f4','f4','f4','f4','f4','f4')) #create check aperture mask checkaperture=np.zeros(dataflx.shape) print('Computing photometry for objects...') #loop over each source for idobj in range(nsrc): ######### #Find positions etc and transform as appropriate ######### #extract MUSE source paramaters x= cat[1].data['x'][idobj] y= cat[1].data['y'][idobj] a= cat[1].data['a'][idobj] b= cat[1].data['b'][idobj] theta= cat[1].data['theta'][idobj] #compute kron radius on MUSE detection image #Kron rad in units of a,b tmpdata=np.copy(detflx) tmpmask=np.copy(segmask) #mask all other sources to avoid overlaps but keep desired one pixels=np.where(tmpmask == idobj+1) tmpmask[pixels]=0 #compute kron radius [pixel of reference image] kronrad, flg = sep.kron_radius(tmpdata,x,y,a,b,theta,6.0,mask=tmpmask) #plt.imshow(np.log10(tmpdata+1),origin='low') #plt.show() #exit() #now check if size is sensible in units of MUSE data rmin = 2.0 #MUSE pix use_circle = kronrad * np.sqrt(a*b) < rmin #use circular aperture of 2" in muse pixel unit rcircap = circap/psref #now use info to compute photometry and apply #spatial transformation if needed if('MUSE' not in instrument): #map centre of aperture - +1 reference #go to world radec=wref.wcs_pix2world([[x,y]],1) #back to new instrument pixel newxy=wimg.wcs_world2pix(radec,1) #apply shift to register WCS xphot=newxy[0][0]+dxp yphot=newxy[0][1]+dyp #scale radii to new pixel size rminphot=rcircap*psref/psimg aphot=a*psref/psimg bphot=b*psref/psimg #Kron radius in units of a,b else: #for muse, transfer to same units xphot=x yphot=y rminphot=rcircap aphot=a bphot=b ##### #Compute local sky ##### skyreg=kn*kronrad*np.sqrt(aphot*bphot)+15 cutskymask=segmasktrans[yphot-skyreg:yphot+skyreg,xphot-skyreg:xphot+skyreg] cutskydata=dataflx[yphot-skyreg:yphot+skyreg,xphot-skyreg:xphot+skyreg] skymedian=np.nan_to_num(np.median(cutskydata[np.where(cutskymask < 1.0)])) #print skymedian #plt.imshow(cutskymask,origin='low') #plt.show() #if(idobj > 30): # exit() ######### #Now grab the Kron mag computed using 
detection image ######### #mask all other objects to avoid blending tmpdata=np.copy(dataflx) #apply local sky subtraction tmpdata=tmpdata-skymedian tmpvar=np.copy(datavar) tmpmask=np.copy(segmasktrans) pixels=np.where(tmpmask == idobj+1) tmpmask[pixels]=0 #plt.imshow(tmpmask,origin='low') #plt.show() #exit() #circular aperture if(use_circle): #flux in circular aperture flux_kron, err, flg = sep.sum_circle(tmpdata,xphot,yphot,rminphot,mask=tmpmask) #propagate variance fluxvar, err, flg = sep.sum_circle(tmpvar,xphot,yphot,rminphot,mask=tmpmask) #store Rused in arcsec rused=rminphot*psimg #update check aperture tmpcheckaper=np.zeros(dataflx.shape,dtype=bool) sep.mask_ellipse(tmpcheckaper,xphot,yphot,1.,1.,0.,r=rminphot) checkaperture=checkaperture+tmpcheckaper*(idobj+1) #kron apertures else: #kron flux flux_kron, err, flg = sep.sum_ellipse(tmpdata,xphot, yphot, aphot, bphot, theta, kn*kronrad, mask=tmpmask) #propagate variance fluxvar, err, flg = sep.sum_ellipse(tmpvar,xphot,yphot, aphot, bphot, theta, kn*kronrad, mask=tmpmask) #translate in radius rused=kn*kronrad*psimg*np.sqrt(aphot*bphot) #update check aperture tmpcheckaper=np.zeros(dataflx.shape,dtype=bool) sep.mask_ellipse(tmpcheckaper,xphot,yphot,aphot,bphot,theta,r=kn*kronrad) checkaperture=checkaperture+tmpcheckaper*(idobj+1) #compute error for aperture if(noise[0]): #use model appix=np.where(tmpcheckaper > 0) errflux_kron=noise[0]*noise[1]*len(appix[0])**noise[2] else: #propagate variance errflux_kron=np.sqrt(fluxvar) #go to mag if(flux_kron > 0): mag_aper=-2.5*np.log10(flux_kron)+img[0].header['ZPAB'] errmg_aper=2.5*np.log10(1.+errflux_kron/flux_kron) else: mag_aper=99.0 errmg_aper=99.0 #find out if non detections if(errflux_kron >= flux_kron): errmg_aper=9 mag_aper=-2.5*np.log10(2.*errflux_kron)+img[0].header['ZPAB'] ####### #grab the photometry in the segmentation map ##### #This may not work well for other instruments #if images are not well aligned pixels=np.where(segmasktrans == idobj+1) #add flux in pixels tmpdata=np.copy(dataflx) #apply sky sub tmpdata=tmpdata-skymedian flux_seg=np.sum(tmpdata[pixels]) #compute noise from model or adding variance if(noise[0]): #from model errfx_seg=noise[0]*noise[1]*len(pixels[0])**noise[2] else: #add variance in pixels to compute error errfx_seg=np.sqrt(np.sum(datavar[pixels])) #go to mag with calibrations if(flux_seg > 0): mag_seg=-2.5*np.log10(flux_seg)+img[0].header['ZPAB'] errmg_seg=2.5*np.log10(1.+errfx_seg/flux_seg) else: mag_seg=99.0 errmg_seg=99.0 #find out if non detections if(errfx_seg >= flux_seg): errmg_seg=9 mag_seg=-2.5*np.log10(2.*errfx_seg)+img[0].header['ZPAB'] #stash by id phot.add_row((idobj+1,mag_aper,errmg_aper,flux_kron,errflux_kron,rused,mag_seg,errmg_seg, flux_seg,errfx_seg,img[0].header['ZPAB'])) #dump the aperture check image hdumain = fits.PrimaryHDU(checkaperture,header=img[1].header) hdulist = fits.HDUList(hdumain) hdulist.writeto("{}_aper.fits".format(rname),clobber=True) #close cat.close() img.close() seg.close() det.close() return phot
def do_stage(self, images): for i, image in enumerate(images): try: # Set the number of source pixels to be 5% of the total. This keeps us safe from # satellites and airplanes. sep.set_extract_pixstack(int(image.nx * image.ny * 0.05)) data = image.data.copy() error = (np.abs(data) + image.readnoise ** 2.0) ** 0.5 mask = image.bpm > 0 # Fits can be backwards byte order, so fix that if need be and subtract # the background try: bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) except ValueError: data = data.byteswap(True).newbyteorder() bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) bkg.subfrom(data) # Do an initial source detection # TODO: Add back in masking after we are sure SEP works sources = sep.extract(data, self.threshold, minarea=self.min_area, err=error, deblend_cont=0.005) # Convert the detections into a table sources = Table(sources) # Calculate the ellipticity sources['ellipticity'] = 1.0 - (sources['b'] / sources['a']) # Fix any value of theta that are invalid due to floating point rounding # -pi / 2 < theta < pi / 2 sources['theta'][sources['theta'] > (np.pi / 2.0)] -= np.pi sources['theta'][sources['theta'] < (-np.pi / 2.0)] += np.pi # Calculate the kron radius kronrad, krflag = sep.kron_radius(data, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 6.0) sources['flag'] |= krflag sources['kronrad'] = kronrad # Calcuate the equivilent of flux_auto flux, fluxerr, flag = sep.sum_ellipse(data, sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * kronrad, subpix=1, err=error) sources['flux'] = flux sources['fluxerr'] = fluxerr sources['flag'] |= flag # Calculate the FWHMs of the stars: fwhm = 2.0 * (np.log(2) * (sources['a'] ** 2.0 + sources['b'] ** 2.0)) ** 0.5 sources['fwhm'] = fwhm # Cut individual bright pixels. 
Often cosmic rays sources = sources[fwhm > 1.0] # Measure the flux profile flux_radii, flag = sep.flux_radius(data, sources['x'], sources['y'], 6.0 * sources['a'], [0.25, 0.5, 0.75], normflux=sources['flux'], subpix=5) sources['flag'] |= flag sources['fluxrad25'] = flux_radii[:, 0] sources['fluxrad50'] = flux_radii[:, 1] sources['fluxrad75'] = flux_radii[:, 2] # Calculate the windowed positions sig = 2.0 / 2.35 * sources['fluxrad50'] xwin, ywin, flag = sep.winpos(data, sources['x'], sources['y'], sig) sources['flag'] |= flag sources['xwin'] = xwin sources['ywin'] = ywin # Calculate the average background at each source bkgflux, fluxerr, flag = sep.sum_ellipse(bkg.back(), sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * sources['kronrad'], subpix=1) #masksum, fluxerr, flag = sep.sum_ellipse(mask, sources['x'], sources['y'], # sources['a'], sources['b'], np.pi / 2.0, # 2.5 * kronrad, subpix=1) background_area = (2.5 * sources['kronrad']) ** 2.0 * sources['a'] * sources['b'] * np.pi # - masksum sources['background'] = bkgflux sources['background'][background_area > 0] /= background_area[background_area > 0] # Update the catalog to match fits convention instead of python array convention sources['x'] += 1.0 sources['y'] += 1.0 sources['xpeak'] += 1 sources['ypeak'] += 1 sources['xwin'] += 1.0 sources['ywin'] += 1.0 sources['theta'] = np.degrees(sources['theta']) image.catalog = sources['x', 'y', 'xwin', 'ywin', 'xpeak', 'ypeak', 'flux', 'fluxerr', 'background', 'fwhm', 'a', 'b', 'theta', 'kronrad', 'ellipticity', 'fluxrad25', 'fluxrad50', 'fluxrad75', 'x2', 'y2', 'xy', 'flag'] # Add the units and description to the catalogs image.catalog['x'].unit = 'pixel' image.catalog['x'].description = 'X coordinate of the object' image.catalog['y'].unit = 'pixel' image.catalog['y'].description = 'Y coordinate of the object' image.catalog['xwin'].unit = 'pixel' image.catalog['xwin'].description = 'Windowed X coordinate of the object' image.catalog['ywin'].unit = 'pixel' image.catalog['ywin'].description = 'Windowed Y coordinate of the object' image.catalog['xpeak'].unit = 'pixel' image.catalog['xpeak'].description = 'X coordinate of the peak' image.catalog['ypeak'].unit = 'pixel' image.catalog['ypeak'].description = 'Windowed Y coordinate of the peak' image.catalog['flux'].unit = 'counts' image.catalog['flux'].description = 'Flux within a Kron-like elliptical aperture' image.catalog['fluxerr'].unit = 'counts' image.catalog['fluxerr'].description = 'Erronr on the flux within a Kron-like elliptical aperture' image.catalog['background'].unit = 'counts' image.catalog['background'].description = 'Average background value in the aperture' image.catalog['fwhm'].unit = 'pixel' image.catalog['fwhm'].description = 'FWHM of the object' image.catalog['a'].unit = 'pixel' image.catalog['a'].description = 'Semi-major axis of the object' image.catalog['b'].unit = 'pixel' image.catalog['b'].description = 'Semi-minor axis of the object' image.catalog['theta'].unit = 'degrees' image.catalog['theta'].description = 'Position angle of the object' image.catalog['kronrad'].unit = 'pixel' image.catalog['kronrad'].description = 'Kron radius used for extraction' image.catalog['ellipticity'].description = 'Ellipticity' image.catalog['fluxrad25'].unit = 'pixel' image.catalog['fluxrad25'].description = 'Radius containing 25% of the flux' image.catalog['fluxrad50'].unit = 'pixel' image.catalog['fluxrad50'].description = 'Radius containing 50% of the flux' image.catalog['fluxrad75'].unit = 'pixel' 
image.catalog['fluxrad75'].description = 'Radius containing 75% of the flux' image.catalog['x2'].unit = 'pixel^2' image.catalog['x2'].description = 'Variance on X coordinate of the object' image.catalog['y2'].unit = 'pixel^2' image.catalog['y2'].description = 'Variance on Y coordinate of the object' image.catalog['xy'].unit = 'pixel^2' image.catalog['xy'].description = 'XY covariance of the object' image.catalog['flag'].description = 'Bit mask combination of extraction and photometry flags' image.catalog.sort('flux') image.catalog.reverse() logging_tags = logs.image_config_to_tags(image, self.group_by_keywords) logs.add_tag(logging_tags, 'filename', os.path.basename(image.filename)) # Save some background statistics in the header mean_background = stats.sigma_clipped_mean(bkg.back(), 5.0) image.header['L1MEAN'] = (mean_background, '[counts] Sigma clipped mean of frame background') logs.add_tag(logging_tags, 'L1MEAN', float(mean_background)) median_background = np.median(bkg.back()) image.header['L1MEDIAN'] = (median_background, '[counts] Median of frame background') logs.add_tag(logging_tags, 'L1MEDIAN', float(median_background)) std_background = stats.robust_standard_deviation(bkg.back()) image.header['L1SIGMA'] = (std_background, '[counts] Robust std dev of frame background') logs.add_tag(logging_tags, 'L1SIGMA', float(std_background)) # Save some image statistics to the header good_objects = image.catalog['flag'] == 0 seeing = np.median(image.catalog['fwhm'][good_objects]) * image.pixel_scale image.header['L1FWHM'] = (seeing, '[arcsec] Frame FWHM in arcsec') logs.add_tag(logging_tags, 'L1FWHM', float(seeing)) mean_ellipticity = stats.sigma_clipped_mean(sources['ellipticity'][good_objects], 3.0) image.header['L1ELLIP'] = (mean_ellipticity, 'Mean image ellipticity (1-B/A)') logs.add_tag(logging_tags, 'L1ELLIP', float(mean_ellipticity)) mean_position_angle = stats.sigma_clipped_mean(sources['theta'][good_objects], 3.0) image.header['L1ELLIPA'] = (mean_position_angle, '[deg] PA of mean image ellipticity') logs.add_tag(logging_tags, 'L1ELLIPA', float(mean_position_angle)) self.logger.info('Extracted sources', extra=logging_tags) except Exception as e: logging_tags = logs.image_config_to_tags(image, self.group_by_keywords) logs.add_tag(logging_tags, 'filename', os.path.basename(image.filename)) self.logger.error(e, extra=logging_tags) return images
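The stage above chains the usual SEP calls together (background model, extraction, Kron radius, elliptical photometry). As a reference point, here is a minimal, self-contained sketch of that FLUX_AUTO-style sequence on a synthetic frame; the Gaussian source, noise level, and thresholds below are invented purely for illustration and are not taken from the pipeline.

import numpy as np
import sep

# Synthetic frame: flat sky, Gaussian noise, and one bright Gaussian source.
# All values here are arbitrary and for illustration only.
np.random.seed(0)
ny, nx = 256, 256
yy, xx = np.mgrid[0:ny, 0:nx]
data = 100.0 + 5.0 * np.random.standard_normal((ny, nx))
data += 2000.0 * np.exp(-((xx - 128.0) ** 2 + (yy - 128.0) ** 2) / (2.0 * 3.0 ** 2))

# Background model and subtraction, as in the stages above.
bkg = sep.Background(data, bw=32, bh=32, fw=3, fh=3)
sub = data - bkg.back()

# Detect at 3 sigma above the background RMS.
objects = sep.extract(sub, 3.0, err=bkg.globalrms, minarea=5)

# Kron radius, then FLUX_AUTO-like photometry inside 2.5 * kron radius.
kronrad, krflag = sep.kron_radius(sub, objects['x'], objects['y'],
                                  objects['a'], objects['b'], objects['theta'], 6.0)
flux, fluxerr, flag = sep.sum_ellipse(sub, objects['x'], objects['y'],
                                      objects['a'], objects['b'], objects['theta'],
                                      2.5 * kronrad, err=bkg.globalrms, subpix=1)
flag |= krflag
print(len(objects), flux, fluxerr, flag)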
def _run_sep(self): import sep # THRESH=1.2 # in sky sigma # DETECT_THRESH=1.6 # in sky sigma # DEBLEND_MINCONT=0.005 # DETECT_MINAREA = 6 # minimum number of pixels above threshold objs, seg = sep.extract( self.detim, self.detect_thresh, err=self.detnoise, segmentation_map=True, **self.sx_config ) logger.debug('found %d objects' % objs.size) if objs.size == 0: self.cat = objs return None flux_auto = np.zeros(objs.size)-9999.0 fluxerr_auto = np.zeros(objs.size)-9999.0 flux_radius = np.zeros(objs.size)-9999.0 kron_radius = np.zeros(objs.size)-9999.0 w, = np.where( (objs['a'] >= 0.0) & (objs['b'] >= 0.0) & between(objs['theta'], -pi/2., pi/2., type='[]') ) if w.size > 0: kron_radius[w], krflag = sep.kron_radius( self.detim, objs['x'][w], objs['y'][w], objs['a'][w], objs['b'][w], objs['theta'][w], 6.0, ) objs['flag'][w] |= krflag aper_rad = 2.5*kron_radius flux_auto[w], fluxerr_auto[w], flag_auto = \ sep.sum_ellipse( self.detim, objs['x'][w], objs['y'][w], objs['a'][w], objs['b'][w], objs['theta'][w], aper_rad[w], subpix=1, ) objs['flag'][w] |= flag_auto # what we did in DES, but note threshold above # is 1 as opposed to wide survey. deep survey # was even lower, 0.8? # used half light radius PHOT_FLUXFRAC = 0.5 flux_radius[w], frflag = sep.flux_radius( self.detim, objs['x'][w], objs['y'][w], 6.*objs['a'][w], PHOT_FLUXFRAC, normflux=flux_auto[w], subpix=5, ) objs['flag'][w] |= frflag # combine flags into 'flag' ncut = 2 # need this to make sure array new_dt = [ ('id', 'i8'), ('number', 'i4'), ('ncutout', 'i4'), ('kron_radius', 'f4'), ('flux_auto', 'f4'), ('fluxerr_auto', 'f4'), ('flux_radius', 'f4'), ('isoarea_image', 'f4'), ('iso_radius', 'f4'), ('box_size', 'i4'), ('file_id', 'i8', ncut), ('orig_row', 'f4', ncut), ('orig_col', 'f4', ncut), ('orig_start_row', 'i8', ncut), ('orig_start_col', 'i8', ncut), ('orig_end_row', 'i8', ncut), ('orig_end_col', 'i8', ncut), ('cutout_row', 'f4', ncut), ('cutout_col', 'f4', ncut), ('dudrow', 'f8', ncut), ('dudcol', 'f8', ncut), ('dvdrow', 'f8', ncut), ('dvdcol', 'f8', ncut), ] cat = eu.numpy_util.add_fields(objs, new_dt) cat['id'] = np.arange(cat.size) cat['number'] = np.arange(1, cat.size+1) cat['ncutout'] = 1 cat['flux_auto'] = flux_auto cat['fluxerr_auto'] = fluxerr_auto cat['flux_radius'] = flux_radius wcs = self.datalist[0]['wcs'] cat['dudrow'][:, 0] = wcs.dudy cat['dudcol'][:, 0] = wcs.dudx cat['dvdrow'][:, 0] = wcs.dvdy cat['dvdcol'][:, 0] = wcs.dvdx # use the number of pixels in the seg map as the iso area for i in range(objs.size): w = np.where(seg == (i+1)) cat['isoarea_image'][i] = w[0].size cat['iso_radius'] = np.sqrt(cat['isoarea_image'].clip(min=1)/np.pi) box_size = self._get_box_sizes(cat) half_box_size = box_size//2 maxrow, maxcol = self.detim.shape cat['box_size'] = box_size cat['orig_row'][:, 0] = cat['y'] cat['orig_col'][:, 0] = cat['x'] orow = cat['orig_row'][:, 0].astype('i4') ocol = cat['orig_col'][:, 0].astype('i4') ostart_row = orow - half_box_size + 1 ostart_col = ocol - half_box_size + 1 oend_row = orow + half_box_size + 1 # plus one for slices oend_col = ocol + half_box_size + 1 ostart_row.clip(min=0, out=ostart_row) ostart_col.clip(min=0, out=ostart_col) oend_row.clip(max=maxrow, out=oend_row) oend_col.clip(max=maxcol, out=oend_col) # could result in smaller than box_size above cat['orig_start_row'][:, 0] = ostart_row cat['orig_start_col'][:, 0] = ostart_col cat['orig_end_row'][:, 0] = oend_row cat['orig_end_col'][:, 0] = oend_col cat['cutout_row'][:, 0] = \ cat['orig_row'][:, 0] - cat['orig_start_row'][:, 0] 
cat['cutout_col'][:, 0] = \ cat['orig_col'][:, 0] - cat['orig_start_col'][:, 0] self.seg = seg self.bmask = np.zeros(seg.shape, dtype='i4') self.cat = cat
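_run_sep above derives the isophotal area by looping over np.where(seg == i + 1) for every label. A vectorized equivalent using np.bincount is sketched below; like the loop, it assumes segment labels run from 1 to the number of objects. The helper name is mine, not part of the code above.

import numpy as np

def iso_area_from_segmap(seg, nobj):
    # Pixel count per segment label 1..nobj; equivalent to looping over
    # np.where(seg == i + 1) for each object, but done in one pass.
    counts = np.bincount(seg.ravel(), minlength=nobj + 1)
    isoarea = counts[1:nobj + 1].astype(float)
    iso_radius = np.sqrt(isoarea.clip(min=1.0) / np.pi)
    return isoarea, iso_radius

# Toy segmentation map with two labelled objects.
seg = np.zeros((4, 4), dtype=np.int32)
seg[0, 0:2] = 1
seg[2:4, 2:4] = 2
print(iso_area_from_segmap(seg, 2))   # areas: [2., 4.]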
def detect_with_sep(event, detect_thresh=2., npixels=8, grow_seg=5, gauss_fwhm=2., gsize=3, im_wcs=None, root='mycat'): drz_file = event['fits_s3_key'] drz_file_bucket = event['fits_s3_bucket'] root = drz_file.split('/')[-1].split('_')[0] s3 = boto3.resource('s3') s3_client = boto3.client('s3') bkt = s3.Bucket(drz_file_bucket) bkt.download_file(drz_file, '/tmp/{0}'.format(root), ExtraArgs={"RequestPayer": "requester"}) im = fits.open('/tmp/{0}'.format(root)) im_wcs = wcs.WCS(im[1].header, relax=True) data = im[1].data.byteswap().newbyteorder() wht_data = im[2].data.byteswap().newbyteorder() data_mask = np.cast[data.dtype](data == 0) ## Get AB zeropoint if 'PHOTFNU' in im[0].header: ZP = -2.5 * np.log10(im[0].header['PHOTFNU']) + 8.90 elif 'PHOTFLAM' in im[0].header: ZP = (-2.5 * np.log10(im[0].header['PHOTFLAM']) - 21.10 - 5 * np.log10(im[0].header['PHOTPLAM']) + 18.6921) else: print( 'Couldn\'t find PHOTFNU or PHOTPLAM/PHOTFLAM keywords, use ZP=25') return None # Scale fluxes to mico-Jy uJy_to_dn = 1 / (3631 * 1e6 * 10**(-0.4 * ZP)) err = 1 / np.sqrt(wht_data) # set up the error array err = 1 / np.sqrt(wht_data) err[~np.isfinite(err)] = 0 mask = (err == 0) # get the background bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) bkg_data = bkg.back() ratio = bkg.rms() / err err_scale = np.median(ratio[(~mask) & np.isfinite(ratio)]) err *= err_scale objects = sep.extract(data - bkg_data, detect_thresh, err=err, mask=mask, minarea=14, filter_kernel=GAUSS_3_7x7, filter_type='conv', deblend_nthresh=32, deblend_cont=0.005, clean=True, clean_param=1., segmentation_map=False) catalog = Table(objects) # add things to catalog autoparams = [2.5, 3.5] catalog['number'] = np.arange(len(catalog), dtype=np.int32) + 1 catalog['theta'] = np.clip(catalog['theta'], -np.pi / 2, np.pi / 2) for c in ['a', 'b', 'x', 'y', 'theta']: catalog = catalog[np.isfinite(catalog[c])] catalog['ra'], catalog['dec'] = im_wcs.all_pix2world( catalog['x'], catalog['y'], 1) catalog['ra'].unit = u.deg catalog['dec'].unit = u.deg catalog['x_world'], catalog['y_world'] = catalog['ra'], catalog['dec'] kronrad, krflag = sep.kron_radius(data - bkg_data, catalog['x'], catalog['y'], catalog['a'], catalog['b'], catalog['theta'], 6.0) kronrad *= autoparams[0] kronrad[~np.isfinite(kronrad)] = autoparams[1] kronrad = np.maximum(kronrad, autoparams[1]) kron_out = sep.sum_ellipse(data - bkg_data, catalog['x'], catalog['y'], catalog['a'], catalog['b'], catalog['theta'], kronrad, subpix=5, err=err) kron_flux, kron_fluxerr, kron_flag = kron_out kron_flux_flag = kron_flag catalog['mag_auto_raw'] = ZP - 2.5 * np.log10(kron_flux) catalog['magerr_auto_raw'] = 2.5 / np.log(10) * kron_fluxerr / kron_flux catalog['mag_auto'] = catalog['mag_auto_raw'] * 1. catalog['magerr_auto'] = catalog['magerr_auto_raw'] * 1. catalog['kron_radius'] = kronrad * u.pixel catalog['kron_flag'] = krflag catalog['kron_flux_flag'] = kron_flux_flag # Make a plot im_data = im[1].data im_shape = im_data.shape im_data[np.isnan(im_data)] = 0.0 # Trim the top and bottom 1 percent of pixel values top = np.percentile(im_data, 99) im_data[im_data > top] = top bottom = np.percentile(im_data, 1) im_data[im_data < bottom] = bottom # Scale the data. im_data = im_data - im_data.min() im_data = (im_data / im_data.max()) * 255. 
im_data = np.uint8(im_data) f, (ax) = plt.subplots(1, 1, sharex=True) f.set_figheight(12) f.set_figwidth(12) ax.imshow(im_data, cmap="Greys", clim=(0, 255), origin='lower') ax.plot(catalog['x'], catalog['y'], 'o', markeredgewidth=1, markeredgecolor='red', markerfacecolor='None') ax.set_xlim([-0.05 * im_shape[1], 1.05 * im_shape[1]]) ax.set_ylim([-0.05 * im_shape[0], 1.05 * im_shape[0]]) f.savefig('/tmp/{0}.png'.format(root)) # Write the catalog to local disk catalog.write('/tmp/{0}.catalog.fits'.format(root), format='fits') # Write out to S3 s3 = boto3.resource('s3') s3.meta.client.upload_file('/tmp/{0}.catalog.fits'.format(root), event['s3_output_bucket'], '{0}/{1}.catalog.fits'.format(root, root)) s3.meta.client.upload_file('/tmp/{0}.png'.format(root), event['s3_output_bucket'], 'PNG/{0}.png'.format(root))
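The zeropoint logic in detect_with_sep uses the standard AB relations from PHOTFNU or PHOTFLAM/PHOTPLAM, and the magnitude errors follow the usual 2.5/ln(10) propagation. A small stand-alone sketch of those conversions; the keyword values below are made up.

import numpy as np

def ab_zeropoint(photflam=None, photplam=None, photfnu=None):
    # AB zeropoint from the usual HST-style header keywords (same relations as above).
    if photfnu is not None:
        return -2.5 * np.log10(photfnu) + 8.90
    return -2.5 * np.log10(photflam) - 21.10 - 5.0 * np.log10(photplam) + 18.6921

def flux_to_mag(flux, fluxerr, zp):
    # m = ZP - 2.5 log10(F);  sigma_m = (2.5 / ln 10) * sigma_F / F
    mag = zp - 2.5 * np.log10(flux)
    magerr = 2.5 / np.log(10.0) * fluxerr / flux
    return mag, magerr

# Hypothetical keyword values, purely for illustration.
zp = ab_zeropoint(photflam=1.5e-20, photplam=12500.0)
print(zp, flux_to_mag(1000.0, 30.0, zp))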
def run(self): """ Runs the calibrating algorithm. The calibrated data is returned in self.dataout """ ### Preparation binning = self.datain.getheadval('XBIN') ### Run Source Extractor # Make sure input data exists as file if not os.path.exists(self.datain.filename): self.datain.save() ''' # Make catalog filename catfilename = self.datain.filenamebegin if catfilename[-1] in '._-': catfilename += 'sex_cat.fits' else: catfilename += '.sex_cat.fits' # Make background filename (may not be used - see below) bkgdfilename = self.datain.filenamebegin if bkgdfilename[-1] in '._-': bkgdfilename += 'SxBkgd.fits' else: bkgdfilename += '_SxBkgd.fits' self.log.debug('Sextractor catalog filename = %s' % catfilename) ''' #Open data out of fits file for use in SEP image = self.datain.image #Set values for variables used later #These variables are used for the background analysis. bw and bh I found just testing various numbers maskthresh = 0.0 bw, bh = 10, 10 fw, fh = 3, 3 fthresh = 0.0 #Create the background image and it's error bkg = sep.Background( image, maskthresh=maskthresh, bw=bw, bh=bh, fw=fw, fh=fh, fthresh=fthresh) #have sep determine the background of the image bkg_image = bkg.back() bkg_rms = bkg.rms() #Subtract the background from the image image_sub = image - bkg_image imsubmed = np.nanmedian(image_sub) imsubmad = mad_std(image_sub) #Create variables that are used during source Extraction extract_thresh = 5 extract_err = bkg_rms #Extract sources from the subtracted image objects = sep.extract(image_sub, extract_thresh, err=extract_err) #Define variables used later during flux calculation sum_c = np.zeroes(len(objects)) sum_c_err = np.zeroes(len(objects)) sum_c_flags = np.zeroes(len(objects)) ratio = np.zeroes(len(objects)) rmax = np.zeroes(len(objects)) dx = np.zeros(len(objects)) dy = np.zeros(len(objects)) #Do basic uncalibrated measurments of flux for use in step astrometry. #First we calculate flux using Ellipses. 
In order to do this we need to calculate the Kron Radius #For the ellipses Extract identified using the ellipse parameters it gives #R is equal to 6 as that is the default used in Source Extractor kronrad, krflag = sep.kron_radius(image_sub, objects['x'], objects['y'], objects['a'], objects['b'], objects['theta'], r=6) #Using this Kron radius we calculate the flux, this is equivallent to FLUX_AUTO in SExtractor flux_elip, fluxerr_elip, flag = sep.sum_ellipse(image_sub, objects['x'], objects['y'], objects['a'], objects['b'], objects['theta'], 2.5 * kronrad, err=bkg_rms, subpix=1) #Then we calculate it using Circular Apetures, this will be used to remove sources that are too elipitical flux_circ, fluxerr_circ, flag = sep.sum_circle(image_sub, objects['x'], objects['y'], r=2.5, err=bkg_rms, subpix=1) ### Extract catalog from source extractor and clean up dataset # Use catalog from sourse extrator (test.cat) #seo_catalog = astropy.table.Table.read(catfilename, format="fits", hdu='LDAC_OBJECTS') seo_Mag = -2.5 * np.log10(flux_elip) seo_MagErr = (2.5 / np.log(10) * (fluxerr_elip / flux_elip)) # Select only the stars in the image: circular image and S/N > 10 elongation = (flux_circ - flux_elip) < 250 seo_SN = ((flux_elip / fluxerr_elip) > 10) seo_SN = (seo_SN) & (elongation) & ( (flux_elip / fluxerr_elip) < 1000) & (fluxerr_elip != 0) self.log.debug('Selected %d stars from Source Extrator catalog' % np.count_nonzero(seo_SN)) ### Query and extract data from Guide Star Catalog # Get RA / Dec ''' ra_center = self.datain.getheadval('RA' ).split(':') dec_center = self.datain.getheadval('DEC').split(':') ra_cent = ' '.join([str(s) for s in ra_center]) dec_cent = ' '.join([str(s) for s in dec_center]) center_coordinates = SkyCoord(ra_cent + ' ' + dec_cent, unit=(u.hourangle, u.deg) ) self.log.debug('Using RA/Dec = %s / %s' % (center_coordinates.ra, center_coordinates.dec) ) # Querry guide star catalog2 with center coordinates gsc2_query = 'http://gsss.stsci.edu/webservices/vo/CatalogSearch.aspx?' gsc2_query += 'RA='+str(center_coordinates.ra.value) gsc2_query += '&DEC='+str(center_coordinates.dec.value) gsc2_query += '&DSN=+&FORMAT=CSV&CAT=GSC241&SR=0.5&' self.log.debug('Running URL = %s' % gsc2_query) gsc2_result = requests.get(gsc2_query) # Get data from result filter_map = self.getarg('filtermap').split('|') filter_name = filter_tel = self.datain.getheadval('FILTER') for fil in filter_map: entry = fil.split('=') if entry[0] == filter_tel: try: filter_name = entry[1] except: self.log.error("Badly formatted filter mapping. 
No '=' after %s" % filter_tel) query_table = astropy.io.ascii.read(gsc2_result.text) table_filter = 'SDSS'+filter_name+'Mag' table_filter_err = 'SDSS'+filter_name+'MagErr' GSC_RA = query_table['ra'][(query_table[table_filter]<22) & (query_table[table_filter]>0)] GSC_DEC = query_table['dec'][(query_table[table_filter]<22) & (query_table[table_filter]>0)] GSC_Mag = query_table[table_filter][(query_table[table_filter]<22) & (query_table[table_filter]>0)] GSC_MagErr = query_table[table_filter_err][(query_table[table_filter]<22) & (query_table[table_filter]>0)] self.log.debug('Received %d entries from Guide Star Catalog' % len(GSC_RA)) ### Mach Guide Star Catalog data with data from Source Extractor # Do the matching seo_radec = SkyCoord(ra=seo_catalog['ALPHA_J2000'], dec=seo_catalog['DELTA_J2000']) GSC_radec = SkyCoord(ra=GSC_RA*u.deg, dec=GSC_DEC*u.deg) idx, d2d, d3d = GSC_radec.match_to_catalog_sky(seo_radec[seo_SN]) # only select objects less than 0.025 away in distance, get distance value dist_value = 1*0.76*binning/3600. #Maximum distance is 1 pixel mask = d2d.value<dist_value if(np.sum(mask) < 2): self.log.warn('Only %d sources match between image and guide star catalog, fit may not work' % np.sum(mask) ) self.log.debug('Distance_Value = %f, Min(distances) = %f, Mask length = %d' % ( dist_value, np.min(d2d.value), np.sum(mask) ) ) ### Calculate the fit correction between the guide star and the extracted values # Make lambda function to be minimized # The fit finds m_ml and b_ml where # seo_Mag = b_ml + m_ml * GSC_Mag nll = lambda *args: -residual(*args) # Get errors eps_data = np.sqrt(GSC_MagErr**2+seo_MagErr[seo_SN][idx]**2) # Make estimate for intercept to give as initial guess b_ml0 = np.median(seo_Mag[seo_SN][idx][mask]-GSC_Mag[mask]) self.log.debug('Offset guess is %f mag' % b_ml0) # Calculate distance from that guess and get StdDev of distances guessdistances = np.abs( b_ml0 - ( seo_Mag[seo_SN][idx] - GSC_Mag ) ) guessdistmed = np.median(guessdistances[mask]) # Update mask to ignore values with large STDEVS mask = np.logical_and( d2d.value < dist_value, guessdistances < 5 * guessdistmed ) self.log.debug('Median of distance to guess = %f, Mask length = %d' % ( guessdistmed, np.sum(mask) ) ) # Solve linear equation result = scipy.optimize.minimize(nll, [1, b_ml0], args=(GSC_Mag[mask], seo_Mag[seo_SN][idx][mask], eps_data[mask])) m_ml, b_ml = result["x"] self.log.info('Fitted offset is %f mag, fitted slope is %f' % (b_ml, m_ml) ) b_ml_corr = b_ml + (m_ml-1) * np.median(GSC_Mag[mask]) self.log.info('Corrected offset is %f mag' % b_ml_corr) ''' ### Make table with all data from source extractor # Collect data columns cols = [] num = np.arange(1, len(objects['x']) + 1) cols.append(fits.Column(name='ID', format='D', array=num)) cols.append( fits.Column(name='X', format='D', array=objects['x'][seo_SN], unit='pixel')) cols.append( fits.Column(name='Y', format='D', array=objects['y'][seo_SN], unit='pixel')) cols.append( fits.Column(name='Uncalibrated Magnitude', format='D', array=seo_Mag, unit='magnitude')) cols.append( fits.Column(name='Uncalibrated Magnitude_Err', format='D', array=seo_MagErr, unit='magnitude')) # Make table c = fits.ColDefs(cols) sources_table = fits.BinTableHDU.from_columns(c) ''' ### Make table with data which was fit # Collect data columns cols = [] cols.append(fits.Column(name='RA', format='D', array=GSC_RA[mask], unit='deg')) cols.append(fits.Column(name='Dec', format='D', array=GSC_DEC[mask], unit='deg')) cols.append(fits.Column(name='Diff_Deg', format='D', 
array=d2d[mask], unit='deg')) cols.append(fits.Column(name='GSC_Mag', format='D', array=GSC_Mag[mask], unit='magnitude')) cols.append(fits.Column(name='Img_Mag', format='D', array=seo_Mag[seo_SN][idx][mask], unit='magnitude')) cols.append(fits.Column(name='Error', format='D', array=eps_data[mask], unit='magnitude')) # Make table c = fits.ColDefs(cols) fitdata_table = fits.BinTableHDU.from_columns(c) ''' ### Make output data # Copy data from datain self.dataout = self.datain ''' # Add Photometric Zero point magnitude self.dataout.setheadval('PHTZPRAW', -b_ml_corr, 'Photometric zeropoint for RAW data') self.dataout.setheadval('PTZRAWER', 0.0, 'Uncertainty of the RAW photometric zeropoint') self.dataout.setheadval('PHOTZP', 8.9, 'Photometric zeropoint MAG=-2.5*log(data)+PHOTZP') self.dataout.setheadval('BUNIT', 'Jy/pixel', 'Units for the data') ''' # Scale the image using calculated b_ml_corr #image_background = fits.open(bkgdfilename)[0].data #bzero = np.nanpercentile(self.dataout.image,self.getarg('zeropercent')) #bzero = image_background #-- Alternative bzero idea: #-mask = image_array < np.percentile(image,90) #-bzero = np.median(image_array[mask]) #bscale = 3631. * 10 ** (b_ml_corr/2.5) #self.dataout.image = bscale * (self.dataout.image - bzero) # Add sources and fitdata table self.dataout.tableset(sources_table.data, 'Sources', sources_table.header) #self.dataout.tableset(fitdata_table.data,'Fit Data',fitdata_table.header) ''' ### If requested make a plot of the fit and save as png if self.getarg('fitplot'): # Set up plot plt.figure(figsize=(10,7)) # Plot 5sigma error range gmin = min(GSC_Mag[mask]) gmax = max(GSC_Mag[mask]) plt.fill([gmin,gmin,gmax,gmax],[gmin+b_ml0-guessdistmed, gmin+b_ml0+guessdistmed, gmax+b_ml0+guessdistmed, gmax+b_ml0-guessdistmed],'c') # Plot fits plt.plot(GSC_Mag[mask],m_ml*GSC_Mag[mask]+b_ml) plt.plot(GSC_Mag[mask],GSC_Mag[mask]+b_ml0) # Plot the datapoints plt.errorbar(GSC_Mag[d2d.value<dist_value],seo_Mag[seo_SN][idx][d2d.value<dist_value], yerr=np.sqrt(eps_data[d2d.value<dist_value]**2),fmt='o',linestyle='none') plt.errorbar(GSC_Mag[mask],seo_Mag[seo_SN][idx][mask], yerr=np.sqrt(eps_data[mask]**2),fmt='o',linestyle='none') #plt.plot(GSC_Mag[d2d.value<dist_value],m_ml*GSC_Mag[d2d.value<dist_value]+zeropoint_fit[1]) plt.legend(['LM-fit','Fit-Guess','GuessDistMed Range','d<distval Data','Good Data']) plt.ylabel('Source extrator magnitude') plt.xlabel('Star catalog magnitude') plt.title('Calibration Fit for file\n' + os.path.split(self.dataout.filename)[1]) # Plot the fit # Axis and labels # Save the image pngname = self.dataout.filenamebegin + 'FCALplot.png' plt.savefig(pngname) self.log.debug('Saved fit plot under %s' % pngname) ''' ### If requested make a text file with the sources list if self.getarg('sourcetable'): # Save region file filename = self.dataout.filenamebegin + 'FCALsources.reg' with open(filename, 'w+') as f: f.write("# Region file format: DS9 version 4.1\n") f.write( """global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1 image\n""" ) for i in range(len(seo_catalog['x'][seo_SN])): f.write("circle(%.7f,%.7f,0.005) # text={%i}\n" % (seo_catalog['x'][seo_SN][i], seo_catalog['y'][seo_SN][i], num[i])) # Save the table txtname = self.dataout.filenamebegin + 'FCALsources.txt' ascii.write(self.dataout.tableget('Sources'), txtname, format=self.getarg('sourcetableformat')) self.log.debug('Saved sources table under %s' % txtname)
def find_stars(self, image: Image) -> Table: """Find stars in given image and append catalog. Args: image: Image to find stars in. Returns: Full table with results. """ import sep # get data and make it continuous data = image.data.copy() # mask? mask = image.mask.data if image.mask is not None else None # estimate background, probably we need to byte swap, and subtract it try: bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) except ValueError as e: data = data.byteswap(True).newbyteorder() bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) bkg.subfrom(data) # extract sources try: sources = sep.extract(data, self.threshold, err=bkg.globalrms, minarea=self.minarea, deblend_nthresh=self.deblend_nthresh, deblend_cont=self.deblend_cont, clean=self.clean, clean_param=self.clean_param, mask=mask) except: log.exception('An error has occured.') return Table() # convert to astropy table sources = Table(sources) # only keep sources with detection flag < 8 sources = sources[sources['flag'] < 8] # Calculate the ellipticity sources['ellipticity'] = 1.0 - (sources['b'] / sources['a']) # calculate the FWHMs of the stars fwhm = 2.0 * (np.log(2) * (sources['a'] ** 2.0 + sources['b'] ** 2.0)) ** 0.5 sources['fwhm'] = fwhm # get gain gain = image.header['DET-GAIN'] if 'DET-GAIN' in image.header else None # Kron radius kronrad, krflag = sep.kron_radius(data, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 6.0) sources['flag'] |= krflag sources['kronrad'] = kronrad # equivalent of FLUX_AUTO flux, fluxerr, flag = sep.sum_ellipse(data, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 2.5 * kronrad, subpix=1, mask=mask, err=bkg.rms(), gain=gain) sources['flag'] |= flag sources['flux'] = flux sources['fluxerr'] = fluxerr # radii at 0.25, 0.5, and 0.75 flux flux_radii, flag = sep.flux_radius(data, sources['x'], sources['y'], 6.0 * sources['a'], [0.25, 0.5, 0.75], normflux=sources['flux'], subpix=5) sources['flag'] |= flag sources['fluxrad25'] = flux_radii[:, 0] sources['fluxrad50'] = flux_radii[:, 1] sources['fluxrad75'] = flux_radii[:, 2] # xwin/ywin sig = 2. / 2.35 * sources['fluxrad50'] xwin, ywin, flag = sep.winpos(data, sources['x'], sources['y'], sig) sources['flag'] |= flag sources['xwin'] = xwin sources['ywin'] = ywin # perform aperture photometry for diameters of 1" to 8" for diameter in [1, 2, 3, 4, 5, 6, 7, 8]: flux, fluxerr, flag = sep.sum_circle(data, sources['x'], sources['y'], diameter / 2. 
/ image.pixel_scale, mask=mask, err=bkg.rms(), gain=gain) sources['fluxaper{0}'.format(diameter)] = flux sources['fluxerr{0}'.format(diameter)] = fluxerr sources['flag'] |= flag # average background at each source # since SEP sums up whole pixels, we need to do the same on an image of ones for the background_area bkgflux, fluxerr, flag = sep.sum_ellipse(bkg.back(), sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * sources['kronrad'], subpix=1) background_area, _, _ = sep.sum_ellipse(np.ones(shape=bkg.back().shape), sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * sources['kronrad'], subpix=1) sources['background'] = bkgflux sources['background'][background_area > 0] /= background_area[background_area > 0] # match fits conventions sources['x'] += 1.0 sources['xpeak'] += 1 sources['xwin'] += 1.0 sources['xmin'] += 1 sources['xmax'] += 1 sources['y'] += 1.0 sources['ypeak'] += 1 sources['ywin'] += 1.0 sources['ymin'] += 1 sources['ymax'] += 1 sources['theta'] = np.degrees(sources['theta']) # pick columns for catalog cat = sources['x', 'y', 'xwin', 'ywin', 'xpeak', 'ypeak', 'flux', 'fluxerr', 'peak', 'fluxaper1', 'fluxerr1', 'fluxaper2', 'fluxerr2', 'fluxaper3', 'fluxerr3', 'fluxaper4', 'fluxerr4', 'fluxaper5', 'fluxerr5', 'fluxaper6', 'fluxerr6', 'fluxaper7', 'fluxerr7', 'fluxaper8', 'fluxerr8', 'background', 'fwhm', 'a', 'b', 'theta', 'kronrad', 'ellipticity', 'fluxrad25', 'fluxrad50', 'fluxrad75', 'x2', 'y2', 'xy', 'flag'] # set it image.catalog = cat # return full catalog return sources
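find_stars derives its windowed centroids by handing sep.winpos a Gaussian sigma of 2/2.35 times the half-flux radius. A compact sketch of that half-light-radius-to-winpos chain on a toy, noise-free Gaussian star; all numbers below are illustrative.

import numpy as np
import sep

# Toy image: a single Gaussian star on a zero background (no noise).
ny = nx = 64
yy, xx = np.mgrid[0:ny, 0:nx]
data = 500.0 * np.exp(-((xx - 31.7) ** 2 + (yy - 30.2) ** 2) / (2.0 * 2.5 ** 2))

objects = sep.extract(data, 1.0, err=1.0)

# Half-flux radius, then the SExtractor-style windowed centroid: the
# weighting sigma is 2 / 2.35 times the half-flux radius, as in find_stars.
flux, fluxerr, _ = sep.sum_circle(data, objects['x'], objects['y'], 10.0)
r50, _ = sep.flux_radius(data, objects['x'], objects['y'],
                         6.0 * objects['a'], 0.5, normflux=flux, subpix=5)
sig = 2.0 / 2.35 * r50
xwin, ywin, wflag = sep.winpos(data, objects['x'], objects['y'], sig)
print(xwin, ywin)   # should land close to (31.7, 30.2)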
def do_stage(self, image): try: # Set the number of source pixels to be 5% of the total. This keeps us safe from # satellites and airplanes. sep.set_extract_pixstack(int(image.nx * image.ny * 0.05)) data = image.data.copy() error = (np.abs(data) + image.readnoise**2.0)**0.5 mask = image.bpm > 0 # Fits can be backwards byte order, so fix that if need be and subtract # the background try: bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) except ValueError: data = data.byteswap(True).newbyteorder() bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) bkg.subfrom(data) # Do an initial source detection # TODO: Add back in masking after we are sure SEP works sources = sep.extract(data, self.threshold, minarea=self.min_area, err=error, deblend_cont=0.005) # Convert the detections into a table sources = Table(sources) # We remove anything with a detection flag >= 8 # This includes memory overflows and objects that are too close the edge sources = sources[sources['flag'] < 8] sources = array_utils.prune_nans_from_table(sources) # Calculate the ellipticity sources['ellipticity'] = 1.0 - (sources['b'] / sources['a']) # Fix any value of theta that are invalid due to floating point rounding # -pi / 2 < theta < pi / 2 sources['theta'][sources['theta'] > (np.pi / 2.0)] -= np.pi sources['theta'][sources['theta'] < (-np.pi / 2.0)] += np.pi # Calculate the kron radius kronrad, krflag = sep.kron_radius(data, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 6.0) sources['flag'] |= krflag sources['kronrad'] = kronrad # Calcuate the equivilent of flux_auto flux, fluxerr, flag = sep.sum_ellipse(data, sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * kronrad, subpix=1, err=error) sources['flux'] = flux sources['fluxerr'] = fluxerr sources['flag'] |= flag # Do circular aperture photometry for diameters of 1" to 6" for diameter in [1, 2, 3, 4, 5, 6]: flux, fluxerr, flag = sep.sum_circle(data, sources['x'], sources['y'], diameter / 2.0 / image.pixel_scale, gain=1.0, err=error) sources['fluxaper{0}'.format(diameter)] = flux sources['fluxerr{0}'.format(diameter)] = fluxerr sources['flag'] |= flag # Calculate the FWHMs of the stars: fwhm = 2.0 * (np.log(2) * (sources['a']**2.0 + sources['b']**2.0))**0.5 sources['fwhm'] = fwhm # Cut individual bright pixels. 
Often cosmic rays sources = sources[fwhm > 1.0] # Measure the flux profile flux_radii, flag = sep.flux_radius(data, sources['x'], sources['y'], 6.0 * sources['a'], [0.25, 0.5, 0.75], normflux=sources['flux'], subpix=5) sources['flag'] |= flag sources['fluxrad25'] = flux_radii[:, 0] sources['fluxrad50'] = flux_radii[:, 1] sources['fluxrad75'] = flux_radii[:, 2] # Calculate the windowed positions sig = 2.0 / 2.35 * sources['fluxrad50'] xwin, ywin, flag = sep.winpos(data, sources['x'], sources['y'], sig) sources['flag'] |= flag sources['xwin'] = xwin sources['ywin'] = ywin # Calculate the average background at each source bkgflux, fluxerr, flag = sep.sum_ellipse(bkg.back(), sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * sources['kronrad'], subpix=1) # masksum, fluxerr, flag = sep.sum_ellipse(mask, sources['x'], sources['y'], # sources['a'], sources['b'], np.pi / 2.0, # 2.5 * kronrad, subpix=1) background_area = ( 2.5 * sources['kronrad'] )**2.0 * sources['a'] * sources['b'] * np.pi # - masksum sources['background'] = bkgflux sources['background'][background_area > 0] /= background_area[ background_area > 0] # Update the catalog to match fits convention instead of python array convention sources['x'] += 1.0 sources['y'] += 1.0 sources['xpeak'] += 1 sources['ypeak'] += 1 sources['xwin'] += 1.0 sources['ywin'] += 1.0 sources['theta'] = np.degrees(sources['theta']) catalog = sources['x', 'y', 'xwin', 'ywin', 'xpeak', 'ypeak', 'flux', 'fluxerr', 'peak', 'fluxaper1', 'fluxerr1', 'fluxaper2', 'fluxerr2', 'fluxaper3', 'fluxerr3', 'fluxaper4', 'fluxerr4', 'fluxaper5', 'fluxerr5', 'fluxaper6', 'fluxerr6', 'background', 'fwhm', 'a', 'b', 'theta', 'kronrad', 'ellipticity', 'fluxrad25', 'fluxrad50', 'fluxrad75', 'x2', 'y2', 'xy', 'flag'] # Add the units and description to the catalogs catalog['x'].unit = 'pixel' catalog['x'].description = 'X coordinate of the object' catalog['y'].unit = 'pixel' catalog['y'].description = 'Y coordinate of the object' catalog['xwin'].unit = 'pixel' catalog['xwin'].description = 'Windowed X coordinate of the object' catalog['ywin'].unit = 'pixel' catalog['ywin'].description = 'Windowed Y coordinate of the object' catalog['xpeak'].unit = 'pixel' catalog['xpeak'].description = 'X coordinate of the peak' catalog['ypeak'].unit = 'pixel' catalog['ypeak'].description = 'Windowed Y coordinate of the peak' catalog['flux'].unit = 'count' catalog[ 'flux'].description = 'Flux within a Kron-like elliptical aperture' catalog['fluxerr'].unit = 'count' catalog[ 'fluxerr'].description = 'Error on the flux within Kron aperture' catalog['peak'].unit = 'count' catalog['peak'].description = 'Peak flux (flux at xpeak, ypeak)' for diameter in [1, 2, 3, 4, 5, 6]: catalog['fluxaper{0}'.format(diameter)].unit = 'count' catalog['fluxaper{0}'.format( diameter )].description = 'Flux from fixed circular aperture: {0}" diameter'.format( diameter) catalog['fluxerr{0}'.format(diameter)].unit = 'count' catalog['fluxerr{0}'.format( diameter )].description = 'Error on Flux from circular aperture: {0}"'.format( diameter) catalog['background'].unit = 'count' catalog[ 'background'].description = 'Average background value in the aperture' catalog['fwhm'].unit = 'pixel' catalog['fwhm'].description = 'FWHM of the object' catalog['a'].unit = 'pixel' catalog['a'].description = 'Semi-major axis of the object' catalog['b'].unit = 'pixel' catalog['b'].description = 'Semi-minor axis of the object' catalog['theta'].unit = 'degree' catalog['theta'].description = 'Position angle of the object' 
catalog['kronrad'].unit = 'pixel' catalog['kronrad'].description = 'Kron radius used for extraction' catalog['ellipticity'].description = 'Ellipticity' catalog['fluxrad25'].unit = 'pixel' catalog[ 'fluxrad25'].description = 'Radius containing 25% of the flux' catalog['fluxrad50'].unit = 'pixel' catalog[ 'fluxrad50'].description = 'Radius containing 50% of the flux' catalog['fluxrad75'].unit = 'pixel' catalog[ 'fluxrad75'].description = 'Radius containing 75% of the flux' catalog['x2'].unit = 'pixel^2' catalog[ 'x2'].description = 'Variance on X coordinate of the object' catalog['y2'].unit = 'pixel^2' catalog[ 'y2'].description = 'Variance on Y coordinate of the object' catalog['xy'].unit = 'pixel^2' catalog['xy'].description = 'XY covariance of the object' catalog[ 'flag'].description = 'Bit mask of extraction/photometry flags' catalog.sort('flux') catalog.reverse() # Save some background statistics in the header mean_background = stats.sigma_clipped_mean(bkg.back(), 5.0) image.header['L1MEAN'] = ( mean_background, '[counts] Sigma clipped mean of frame background') median_background = np.median(bkg.back()) image.header['L1MEDIAN'] = (median_background, '[counts] Median of frame background') std_background = stats.robust_standard_deviation(bkg.back()) image.header['L1SIGMA'] = ( std_background, '[counts] Robust std dev of frame background') # Save some image statistics to the header good_objects = catalog['flag'] == 0 for quantity in ['fwhm', 'ellipticity', 'theta']: good_objects = np.logical_and( good_objects, np.logical_not(np.isnan(catalog[quantity]))) if good_objects.sum() == 0: image.header['L1FWHM'] = ('NaN', '[arcsec] Frame FWHM in arcsec') image.header['L1ELLIP'] = ('NaN', 'Mean image ellipticity (1-B/A)') image.header['L1ELLIPA'] = ( 'NaN', '[deg] PA of mean image ellipticity') else: seeing = np.median( catalog['fwhm'][good_objects]) * image.pixel_scale image.header['L1FWHM'] = (seeing, '[arcsec] Frame FWHM in arcsec') mean_ellipticity = stats.sigma_clipped_mean( catalog['ellipticity'][good_objects], 3.0) image.header['L1ELLIP'] = (mean_ellipticity, 'Mean image ellipticity (1-B/A)') mean_position_angle = stats.sigma_clipped_mean( catalog['theta'][good_objects], 3.0) image.header['L1ELLIPA'] = ( mean_position_angle, '[deg] PA of mean image ellipticity') logging_tags = { key: float(image.header[key]) for key in [ 'L1MEAN', 'L1MEDIAN', 'L1SIGMA', 'L1FWHM', 'L1ELLIP', 'L1ELLIPA' ] } logger.info('Extracted sources', image=image, extra_tags=logging_tags) # adding catalog (a data table) to the appropriate images attribute. image.data_tables['catalog'] = DataTable(data_table=catalog, name='CAT') except Exception: logger.error(logs.format_exception(), image=image) return image
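Both do_stage variants and find_stars convert fixed angular aperture diameters to pixel radii before calling sep.sum_circle. A minimal sketch of that conversion with a hypothetical plate scale; the pipelines read theirs from the image object.

import numpy as np
import sep

# Hypothetical plate scale; the pipelines above take it from the image object.
pixel_scale = 0.389  # arcsec per pixel

data = np.zeros((128, 128))
data[60:70, 60:70] = 50.0            # a crude, purely illustrative "source"
x, y = np.array([64.5]), np.array([64.5])

# Fixed circular apertures of 1"-6" diameter: the radius handed to
# sep.sum_circle is (diameter / 2) / pixel_scale pixels, as in the loops above.
for diameter in [1, 2, 3, 4, 5, 6]:
    r_pix = diameter / 2.0 / pixel_scale
    flux, fluxerr, flag = sep.sum_circle(data, x, y, r_pix, gain=1.0, subpix=5)
    print(diameter, round(r_pix, 2), flux[0])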
def AutoPhot(self, Kron_fact=2.5, min_diameter=3.5, write=True): kronrad, krflag = sep.kron_radius(self.dat, self.src['x'], self.src['y'], self.src['a'], self.src['b'], self.src['theta'], 6.) kronrad[np.isnan(kronrad) == True] = 0. flux, fluxerr, flag = sep.sum_ellipse(self.dat, self.src['x'], self.src['y'], self.src['a'], self.src['b'], self.src['theta'], Kron_fact * kronrad, err=self.skysigma, gain=self.gain, subpix=0) flag |= krflag # Combining flags r_min = 0.5 * min_diameter use_circle = kronrad * np.sqrt(self.src['a'] * self.src['b']) < r_min cflux, cfluxerr, cflag = sep.sum_circle(self.dat, self.src['x'][use_circle], self.src['y'][use_circle], r_min, err=self.skysigma, gain=self.gain, subpix=0) flux[use_circle] = cflux fluxerr[use_circle] = cfluxerr flag[use_circle] = cflag mag = self.zmag - 2.5 * np.log10(flux) magerr = (2.5 / np.log(10.0)) * (fluxerr / flux) r, flag = sep.flux_radius(self.dat, self.src['x'], self.src['y'], 6.0 * self.src['a'], 0.5, normflux=flux, subpix=5) ra, dec = self.wcs.all_pix2world(self.src['x'] + 1, self.src['y'] + 1, 1) df = pd.DataFrame( data={ 'x': self.src['x'], 'y': self.src['y'], 'ra': ra, 'dec': dec, 'a': self.src['a'], 'b': self.src['b'], 'theta': self.src['theta'], 'flux': flux, 'e_flux': fluxerr, 'mag': mag, 'e_mag': magerr, 'kronrad': kronrad, 'flxrad': r, 'flag': flag }) if write: df.to_csv(ip.tmp_dir + 'auto_' + self.img.split('.fits')[0] + '.csv') f = open(ip.tmp_dir + 'src_' + self.img.split('.fits')[0] + '.reg', 'w') for i in np.arange(self.nsrc): f.write('{0:.3f} {1:.3f}\n'.format(self.src['x'][i] + 1, self.src['y'][i] + 1)) f.close() return df
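AutoPhot uses the same small-aperture fallback as several of the functions above: when the Kron aperture would be smaller than a minimum circular aperture, a fixed circle is used instead. A self-contained helper sketching that pattern; the function name and defaults are mine, not from any of the codes above.

import numpy as np
import sep

def auto_flux_with_fallback(data, x, y, a, b, theta, err=None,
                            kron_fact=2.5, min_diameter=3.5):
    # FLUX_AUTO-style photometry that falls back to a fixed circular aperture
    # when the Kron aperture is too small -- the same pattern as in AutoPhot.
    kronrad, krflag = sep.kron_radius(data, x, y, a, b, theta, 6.0)
    kronrad[~np.isfinite(kronrad)] = 0.0
    flux, fluxerr, flag = sep.sum_ellipse(data, x, y, a, b, theta,
                                          kron_fact * kronrad, err=err, subpix=1)
    flag |= krflag

    r_min = 0.5 * min_diameter
    small = kronrad * np.sqrt(a * b) < r_min
    if small.any():
        cflux, cfluxerr, cflag = sep.sum_circle(data, x[small], y[small],
                                                r_min, err=err, subpix=1)
        flux[small], fluxerr[small], flag[small] = cflux, cfluxerr, cflag
    return flux, fluxerr, flag, kronrad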
def sourcephot(catalogue,image,segmap,detection,instrument='MUSE',dxp=0.,dyp=0., noise=[False],zpab=False, kn=2.5, circap=1.0): """ Get a source catalogue from findsources and a fits image with ZP and compute magnitudes in that filter catalogue -> source cat from findsources image -> fits image with ZP in header segmap -> fits of segmentation map detection -> the detection image, used to compute Kron radius instrument -> if not MUSE, map positions from detection to image dxp,dyp -> shifts in pixel of image to register MUSE and image astrometry noise -> if set to a noise model, use equation noise[0]*noise[1]*npix**noise[2] to compute the error zpab -> if ZPAB (zeropoint AB) not stored in header, must be supplied kn -> factor to be used when scaling Kron apertures [sextractor default 2.5] circap -> radius in arcsec for aperture photmetry to be used when Kron aperture fails """ from astropy.io import fits import numpy as np import sep import matplotlib.pyplot as plt from astropy.table import Table from astropy import wcs #grab root name rname=((image.split('/')[-1]).split('.fits'))[0] print ('Working on {}'.format(rname)) #open the catalogue/fits cat=fits.open(catalogue) img=fits.open(image) seg=fits.open(segmap) det=fits.open(detection) #grab reference wcs from detection image try: wref=wcs.WCS(det[1].header) except: wref = wcs.WCS(det[0].header) psref=wref.pixel_scale_matrix[1,1]*3600. print ('Reference pixel size {}'.format(psref)) #if not handling MUSE, special cases for format of data if('MUSE' not in instrument): #handle instrument cases if('LRIS' in instrument): #data imgdata=img[1].data #place holder for varaince as will use noise model below vardata=imgdata*0+1 vardata=vardata.byteswap(True).newbyteorder() #grab wcs image wimg=wcs.WCS(img[1].header) psimg=wimg.pixel_scale_matrix[1,1]*3600. 
#store the ZP if(zpab): img[0].header['ZPAB']=zpab else: print('Instrument not supported!!') exit() else: #for muse, keep eveything the same imgdata=img[0].data vardata=img[1].data psimg=psref #grab flux and var dataflx=np.nan_to_num(imgdata.byteswap(True).newbyteorder()) datavar=np.nan_to_num(vardata.byteswap(True).newbyteorder()) # import pdb; pdb.set_trace() #grab detection and seg mask try: detflx=np.nan_to_num(det[1].data.byteswap(True).newbyteorder()) except: detflx = np.nan_to_num(det[0].data.byteswap(True).newbyteorder()) #go back to 1d if(len(seg[0].data.shape)>2): segmask=(np.nan_to_num(seg[0].data.byteswap(True).newbyteorder()))[0,:,:] else: segmask=(np.nan_to_num(seg[0].data.byteswap(True).newbyteorder())) #if needed, map the segmap to new image with transformation if('MUSE' not in instrument): #allocate space for transformed segmentation map segmasktrans=np.zeros(dataflx.shape) print("Remapping segmentation map to new image...") #loop over original segmap and map to trasformed one #Just use nearest pixel, and keep only 1 when multiple choices for xx in range(segmask.shape[0]): for yy in range(segmask.shape[1]): #go to world radec=wref.wcs_pix2world([[yy,xx]],0) #back to new instrument pixel newxy=wimg.wcs_world2pix(radec,0) #apply shift to register WCS newxy[0][1]=newxy[0][1]+dyp newxy[0][0]=newxy[0][0]+dxp segmasktrans[newxy[0][1],newxy[0][0]]=segmask[xx,yy] #grow buffer as needed by individual instruments #This accounts for resampling to finer pixel size if('LRIS' in instrument): segmasktrans[newxy[0][1]+1,newxy[0][0]+1]=segmask[xx,yy] segmasktrans[newxy[0][1]-1,newxy[0][0]-1]=segmask[xx,yy] segmasktrans[newxy[0][1]+1,newxy[0][0]-1]=segmask[xx,yy] segmasktrans[newxy[0][1]-1,newxy[0][0]+1]=segmask[xx,yy] segmasktrans[newxy[0][1]+1,newxy[0][0]]=segmask[xx,yy] segmasktrans[newxy[0][1]-1,newxy[0][0]]=segmask[xx,yy] segmasktrans[newxy[0][1],newxy[0][0]-1]=segmask[xx,yy] segmasktrans[newxy[0][1],newxy[0][0]+1]=segmask[xx,yy] #dump the transformed segmap for checking hdumain = fits.PrimaryHDU(segmasktrans,header=img[1].header) hdulist = fits.HDUList(hdumain) hdulist.writeto("{}_segremap.fits".format(rname),clobber=True) else: #no transformation needed segmasktrans=segmask #source to extract nsrc=len(cat[1].data) print('Extract photometry for {} sources'.format(nsrc)) phot = Table(names=('ID', 'MAGAP', 'MAGAP_ERR','FXAP', 'FXAP_ERR', 'RAD', 'MAGSEG', 'MAGSEG_ERR', 'FXSEG', 'FXSEG_ERR','ZP'), dtype=('i4','f4','f4','f4','f4','f4','f4','f4','f4','f4','f4')) #create check aperture mask checkaperture=np.zeros(dataflx.shape) print('Computing photometry for objects...') #loop over each source for idobj in range(nsrc): ######### #Find positions etc and transform as appropriate ######### #extract MUSE source paramaters x= cat[1].data['x'][idobj] y= cat[1].data['y'][idobj] a= cat[1].data['a'][idobj] b= cat[1].data['b'][idobj] theta= cat[1].data['theta'][idobj] #compute kron radius on MUSE detection image #Kron rad in units of a,b tmpdata=np.copy(detflx) tmpmask=np.copy(segmask) #mask all other sources to avoid overlaps but keep desired one pixels=np.where(tmpmask == idobj+1) tmpmask[pixels]=0 #compute kron radius [pixel of reference image] kronrad, flg = sep.kron_radius(tmpdata,x,y,a,b,theta,6.0,mask=tmpmask) #plt.imshow(np.log10(tmpdata+1),origin='low') #plt.show() #exit() #now check if size is sensible in units of MUSE data rmin = 2.0 #MUSE pix use_circle = kronrad * np.sqrt(a*b) < rmin #use circular aperture of 2" in muse pixel unit rcircap = circap/psref #now use info to compute 
photometry and apply #spatial transformation if needed if('MUSE' not in instrument): #map centre of aperture - +1 reference #go to world radec=wref.wcs_pix2world([[x,y]],1) #back to new instrument pixel newxy=wimg.wcs_world2pix(radec,1) #apply shift to register WCS xphot=newxy[0][0]+dxp yphot=newxy[0][1]+dyp #scale radii to new pixel size rminphot=rcircap*psref/psimg aphot=a*psref/psimg bphot=b*psref/psimg #Kron radius in units of a,b else: #for muse, transfer to same units xphot=x yphot=y rminphot=rcircap aphot=a bphot=b ##### #Compute local sky ##### skyreg=kn*kronrad*np.sqrt(aphot*bphot)+15 if (yphot-skyreg < 0.0): yphot=skyreg if (xphot-skyreg < 0.0): xphot=skyreg if (yphot+skyreg > segmasktrans.shape[0]-1): yphot=segmasktrans.shape[0]-1-skyreg if (xphot+skyreg > segmasktrans.shape[1]-1): xphot=segmasktrans.shape[1]-1-skyreg #print(int(yphot-skyreg),int(yphot+skyreg),int(xphot-skyreg),int(xphot+skyreg)) cutskymask=segmasktrans[int(yphot-skyreg):int(yphot+skyreg),int(xphot-skyreg):int(xphot+skyreg)] cutskydata=dataflx[int(yphot-skyreg):int(yphot+skyreg),int(xphot-skyreg):int(xphot+skyreg)] skymedian=np.nan_to_num(np.median(cutskydata[np.where(cutskymask < 1.0)])) #print skymedian #plt.imshow(cutskymask,origin='low') #plt.show() #if(idobj > 30): # exit() ######### #Now grab the Kron mag computed using detection image ######### #mask all other objects to avoid blending tmpdata=np.copy(dataflx) #apply local sky subtraction tmpdata=tmpdata-skymedian tmpvar=np.copy(datavar) tmpmask=np.copy(segmasktrans) pixels=np.where(tmpmask == idobj+1) tmpmask[pixels]=0 #plt.imshow(tmpmask,origin='low') #plt.show() #exit() #circular aperture if(use_circle): #flux in circular aperture flux_kron, err, flg = sep.sum_circle(tmpdata,xphot,yphot,rminphot,mask=tmpmask) #propagate variance fluxvar, err, flg = sep.sum_circle(tmpvar,xphot,yphot,rminphot,mask=tmpmask) #store Rused in arcsec rused=rminphot*psimg #update check aperture tmpcheckaper=np.zeros(dataflx.shape,dtype=bool) sep.mask_ellipse(tmpcheckaper,xphot,yphot,1.,1.,0.,r=rminphot) checkaperture=checkaperture+tmpcheckaper*(idobj+1) #kron apertures else: #kron flux flux_kron, err, flg = sep.sum_ellipse(tmpdata,xphot, yphot, aphot, bphot, theta, kn*kronrad, mask=tmpmask) #propagate variance fluxvar, err, flg = sep.sum_ellipse(tmpvar,xphot,yphot, aphot, bphot, theta, kn*kronrad, mask=tmpmask) #translate in radius rused=kn*kronrad*psimg*np.sqrt(aphot*bphot) #update check aperture tmpcheckaper=np.zeros(dataflx.shape,dtype=bool) sep.mask_ellipse(tmpcheckaper,xphot,yphot,aphot,bphot,theta,r=kn*kronrad) checkaperture=checkaperture+tmpcheckaper*(idobj+1) #compute error for aperture if(noise[0]): #use model appix=np.where(tmpcheckaper > 0) errflux_kron=noise[0]*noise[1]*len(appix[0])**noise[2] else: #propagate variance errflux_kron=np.sqrt(fluxvar) #go to mag if(flux_kron > 0): mag_aper=-2.5*np.log10(flux_kron)+img[0].header['ZPAB'] errmg_aper=2.5*np.log10(1.+errflux_kron/flux_kron) else: mag_aper=99.0 errmg_aper=99.0 #find out if non detections if(errflux_kron >= flux_kron): errmg_aper=9 mag_aper=-2.5*np.log10(2.*errflux_kron)+img[0].header['ZPAB'] ####### #grab the photometry in the segmentation map ##### #This may not work well for other instruments #if images are not well aligned pixels=np.where(segmasktrans == idobj+1) #add flux in pixels tmpdata=np.copy(dataflx) #apply sky sub tmpdata=tmpdata-skymedian flux_seg=np.sum(tmpdata[pixels]) #compute noise from model or adding variance if(noise[0]): #from model errfx_seg=noise[0]*noise[1]*len(pixels[0])**noise[2] 
else: #add variance in pixels to compute error errfx_seg=np.sqrt(np.sum(datavar[pixels])) #go to mag with calibrations if(flux_seg > 0): mag_seg=-2.5*np.log10(flux_seg)+img[0].header['ZPAB'] errmg_seg=2.5*np.log10(1.+errfx_seg/flux_seg) else: mag_seg=99.0 errmg_seg=99.0 #find out if non detections if(errfx_seg >= flux_seg): errmg_seg=9 mag_seg=-2.5*np.log10(2.*errfx_seg)+img[0].header['ZPAB'] #stash by id phot.add_row((idobj+1,mag_aper,errmg_aper,flux_kron,errflux_kron,rused,mag_seg,errmg_seg, flux_seg,errfx_seg,img[0].header['ZPAB'])) #dump the aperture check image hdumain = fits.PrimaryHDU(checkaperture,header=img[1].header) hdulist = fits.HDUList(hdumain) hdulist.writeto("{}_aper.fits".format(rname),clobber=True) #close cat.close() img.close() seg.close() det.close() return phot
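sourcephot builds its aperture check image with sep.mask_ellipse, which sets a boolean array to True inside a scaled ellipse. A minimal sketch of that call with made-up source parameters:

import numpy as np
import sep

# Boolean image that is True inside a scaled ellipse; sourcephot uses this to
# build its aperture check image before photometry.
aper_mask = np.zeros((200, 200), dtype=bool)

# Made-up source parameters: centre, semi-axes, and position angle in radians.
sep.mask_ellipse(aper_mask, 100.0, 120.0, 6.0, 3.0, 0.4, r=2.5)

print(aper_mask.sum(), 'pixels flagged inside the 2.5 * (a, b) ellipse')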
def test_masked_segmentation_measurements(): """Test measurements with segmentation masking""" NX = 100 data = np.zeros((NX * 2, NX * 2)) yp, xp = np.indices(data.shape) #### # Make two 2D gaussians that slightly overlap # width of the 2D objects gsigma = 10. # offset between two gaussians in sigmas off = 4 for xy in [[NX, NX], [NX + off * gsigma, NX + off * gsigma]]: R = np.sqrt((xp - xy[0])**2 + (yp - xy[1])**2) g_i = np.exp(-R**2 / 2 / gsigma**2) data += g_i # Absolute total total_exact = g_i.sum() # Add some noise rms = 0.02 np.random.seed(1) data += np.random.normal(size=data.shape) * rms # Run source detection objs, segmap = sep.extract(data, thresh=1.2, err=rms, mask=None, segmentation_map=True) seg_id = np.arange(1, len(objs) + 1, dtype=np.int32) # Compute Kron/Auto parameters x, y, a, b = objs['x'], objs['y'], objs['a'], objs['b'] theta = objs['theta'] kronrad, krflag = sep.kron_radius(data, x, y, a, b, theta, 6.0) flux_auto, fluxerr, flag = sep.sum_ellipse(data, x, y, a, b, theta, 2.5 * kronrad, segmap=segmap, seg_id=seg_id, subpix=1) # Test total flux assert_allclose(flux_auto, total_exact, rtol=5.e-2) # Flux_radius for flux_fraction in [0.2, 0.5]: # Exact solution rhalf_exact = np.sqrt(-np.log(1 - flux_fraction) * gsigma**2 * 2) # Masked measurement flux_radius, flag = sep.flux_radius(data, x, y, 6. * a, flux_fraction, seg_id=seg_id, segmap=segmap, normflux=flux_auto, subpix=5) # Test flux fraction assert_allclose(flux_radius, rhalf_exact, rtol=5.e-2) if False: print('test_masked_flux_radius') print(total_exact, flux_auto) print(rhalf_exact, flux_radius)
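The analytic radii that the segmentation-masking test compares against come from the enclosed-flux profile of a circular Gaussian, F(<r)/F = 1 - exp(-r^2 / (2 sigma^2)), so r_f = sigma * sqrt(-2 ln(1 - f)). A short check of that relation:

import numpy as np

def gaussian_fraction_radius(sigma, frac):
    # Radius enclosing `frac` of the flux of a circular 2-D Gaussian:
    # F(<r)/F = 1 - exp(-r**2 / (2 * sigma**2))  =>  r = sigma * sqrt(-2 ln(1 - frac))
    return sigma * np.sqrt(-2.0 * np.log(1.0 - frac))

# For sigma = 10 (as in the test above), the half-light radius is ~11.77 pixels.
for frac in [0.2, 0.5]:
    print(frac, gaussian_fraction_radius(10.0, frac))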
def _measure(self, img, sources, mask=None): logger.info('measuring source parameters') # HACK: issues with numerical precision # must have -pi/2 <= theta <= pi/2 sources['theta'][np.abs(np.abs(sources['theta']) - np.pi / 2) < 1e-6] = np.pi / 2 for p in ['x', 'y', 'a', 'b', 'theta']: sources = sources[~np.isnan(sources[p])] # calculate "AUTO" parameters kronrad, krflag = sep.kron_radius(img, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 6.0, mask=mask) flux, fluxerr, flag = sep.sum_ellipse(img, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 2.5 * kronrad, subpix=5, mask=mask) flag |= krflag # combine flags into 'flag' sources = sources[~np.isnan(flux)] flux = flux[~np.isnan(flux)] sources = sources[flux > 0] flux = flux[flux > 0] mag_auto = self.zpt - 2.5 * np.log10(flux) r, flag = sep.flux_radius(img, sources['x'], sources['y'], 6. * sources['a'], 0.5, normflux=flux, subpix=5, mask=mask) sources['mag_auto'] = mag_auto sources['flux_auto'] = flux sources['flux_radius'] = r * self.pixscale # approximate fwhm r_squared = sources['a']**2 + sources['b']**2 sources['fwhm'] = 2 * np.sqrt(np.log(2) * r_squared) * self.pixscale q = sources['b'] / sources['a'] area = np.pi * q * sources['flux_radius']**2 sources['mu_ave_auto'] = sources['mag_auto'] + 2.5 * np.log10(2 * area) area_arcsec = np.pi * (self.psf_fwhm / 2)**2 * self.pixscale**2 flux, fluxerr, flag = sep.sum_circle(img, sources['x'], sources['y'], self.psf_fwhm / 2, subpix=5, mask=mask) flux[flux <= 0] = np.nan mu_0 = self.zpt - 2.5 * np.log10(flux / area_arcsec) sources['mu_0_aper'] = mu_0 return sources
def test_vs_sextractor(): data = np.copy(image_data) # make an explicit copy so we can 'subfrom' bkg = sep.Background(data, bw=64, bh=64, fw=3, fh=3) # Test that SExtractor background is same as SEP: bkgarr = bkg.back(dtype=np.float32) assert_allclose(bkgarr, image_refback, rtol=1.e-5) # Extract objects bkg.subfrom(data) objs = sep.extract(data, 1.5*bkg.globalrms) objs = np.sort(objs, order=['y']) # Read SExtractor result refobjs = np.loadtxt(IMAGECAT_FNAME, dtype=IMAGECAT_DTYPE) refobjs = np.sort(refobjs, order=['y']) # Found correct number of sources at the right locations? assert_allclose(objs['x'], refobjs['x'] - 1., atol=1.e-3) assert_allclose(objs['y'], refobjs['y'] - 1., atol=1.e-3) # Test aperture flux flux, fluxerr, flag = sep.sum_circle(data, objs['x'], objs['y'], 5., err=bkg.globalrms) assert_allclose(flux, refobjs['flux_aper'], rtol=2.e-4) assert_allclose(fluxerr, refobjs['fluxerr_aper'], rtol=1.0e-5) # check if the flags work at all (comparison values assert ((flag & sep.APER_TRUNC) != 0).sum() == 4 assert ((flag & sep.APER_HASMASKED) != 0).sum() == 0 # Test "flux_auto" kr, flag = sep.kron_radius(data, objs['x'], objs['y'], objs['a'], objs['b'], objs['theta'], 6.0) flux, fluxerr, flag = sep.sum_ellipse(data, objs['x'], objs['y'], objs['a'], objs['b'], objs['theta'], r=2.5 * kr, err=bkg.globalrms, subpix=1) # For some reason, object at index 59 doesn't match. It's very small # and kron_radius is set to 0.0 in SExtractor, but 0.08 in sep. # Most of the other values are within 1e-4 except one which is only # within 0.01. This might be due to a change in SExtractor between # v2.8.6 (used to generate "truth" catalog) and v2.18.11. kr[59] = 0.0 flux[59] = 0.0 fluxerr[59] = 0.0 assert_allclose(2.5*kr, refobjs['kron_radius'], rtol=0.01) assert_allclose(flux, refobjs['flux_auto'], rtol=0.01) assert_allclose(fluxerr, refobjs['fluxerr_auto'], rtol=0.01) # Test ellipse representation conversion cxx, cyy, cxy = sep.ellipse_coeffs(objs['a'], objs['b'], objs['theta']) assert_allclose(cxx, objs['cxx'], rtol=1.e-4) assert_allclose(cyy, objs['cyy'], rtol=1.e-4) assert_allclose(cxy, objs['cxy'], rtol=1.e-4) a, b, theta = sep.ellipse_axes(objs['cxx'], objs['cyy'], objs['cxy']) assert_allclose(a, objs['a'], rtol=1.e-4) assert_allclose(b, objs['b'], rtol=1.e-4) assert_allclose(theta, objs['theta'], rtol=1.e-4) #test round trip cxx, cyy, cxy = sep.ellipse_coeffs(a, b, theta) assert_allclose(cxx, objs['cxx'], rtol=1.e-4) assert_allclose(cyy, objs['cyy'], rtol=1.e-4) assert_allclose(cxy, objs['cxy'], rtol=1.e-4) # test flux_radius fr, flags = sep.flux_radius(data, objs['x'], objs['y'], 6.*refobjs['a'], [0.1, 0.5, 0.6], normflux=refobjs['flux_auto'], subpix=5) assert_allclose(fr, refobjs["flux_radius"], rtol=0.04, atol=0.01) # test winpos sig = 2. / 2.35 * fr[:, 1] # flux_radius = 0.5 xwin, ywin, flag = sep.winpos(data, objs['x'], objs['y'], sig) assert_allclose(xwin, refobjs["xwin"] - 1., rtol=0., atol=0.0025) assert_allclose(ywin, refobjs["ywin"] - 1., rtol=0., atol=0.0025)
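The ellipse-coefficient round trip in this test follows the standard SExtractor parametrization, cxx = cos^2(theta)/a^2 + sin^2(theta)/b^2, cyy = sin^2(theta)/a^2 + cos^2(theta)/b^2, cxy = 2 cos(theta) sin(theta) (1/a^2 - 1/b^2). A direct sketch of those relations checked against sep.ellipse_coeffs, with made-up shape parameters:

import numpy as np
import sep

# SExtractor ellipse parametrization checked directly against sep.ellipse_coeffs.
a = np.array([3.0])
b = np.array([1.5])
theta = np.array([0.3])

cxx_ref = np.cos(theta) ** 2 / a ** 2 + np.sin(theta) ** 2 / b ** 2
cyy_ref = np.sin(theta) ** 2 / a ** 2 + np.cos(theta) ** 2 / b ** 2
cxy_ref = 2.0 * np.cos(theta) * np.sin(theta) * (1.0 / a ** 2 - 1.0 / b ** 2)

cxx, cyy, cxy = sep.ellipse_coeffs(a, b, theta)
print(np.allclose(cxx, cxx_ref), np.allclose(cyy, cyy_ref), np.allclose(cxy, cxy_ref))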
def kron_info(objects, tbdat_sub): kronrad, kronflag = sep.kron_radius(tbdat_sub, objects['x'], objects['y'], objects['a'], objects['b'], objects['theta'], r=6.0) return kronrad, kronflag
def make_catalog(data, header): # Set the number of source pixels to be 5% of the total. This keeps us safe from # satellites and airplanes. sep.set_extract_pixstack(int(data.shape[1] * data.shape[0] * 0.05)) data = data.copy() error = (np.abs(data) + header['RDNOISE']**2.0)**0.5 mask = data > 0.9 * header['SATURATE'] # FITS can be backwards byte order, so fix that if need be and subtract # the background try: bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) except ValueError: data = data.byteswap(True).newbyteorder() bkg = sep.Background(data, mask=mask, bw=32, bh=32, fw=3, fh=3) bkg.subfrom(data) # Do an initial source detection sources = sep.extract(data, THRESHOLD, mask=mask, minarea=MIN_AREA, err=error, deblend_cont=0.005) # Convert the detections into a table sources = Table(sources) # We remove anything with a detection flag >= 8 # This includes memory overflows and objects that are too close to the edge sources = sources[sources['flag'] < 8] sources = prune_nans_from_table(sources) # Calculate the ellipticity sources['ellipticity'] = 1.0 - (sources['b'] / sources['a']) # Fix any values of theta that are invalid due to floating point rounding # -pi / 2 < theta < pi / 2 sources['theta'][sources['theta'] > (np.pi / 2.0)] -= np.pi sources['theta'][sources['theta'] < (-np.pi / 2.0)] += np.pi # Calculate the kron radius kronrad, krflag = sep.kron_radius(data, sources['x'], sources['y'], sources['a'], sources['b'], sources['theta'], 6.0) sources['flag'] |= krflag sources['kronrad'] = kronrad # Calculate the equivalent of flux_auto flux, fluxerr, flag = sep.sum_ellipse(data, sources['x'], sources['y'], sources['a'], sources['b'], np.pi / 2.0, 2.5 * kronrad, subpix=1, err=error) sources['flux'] = flux sources['fluxerr'] = fluxerr sources['flag'] |= flag # Calculate the FWHMs of the stars: fwhm = 2.0 * (np.log(2) * (sources['a']**2.0 + sources['b']**2.0))**0.5 sources['fwhm'] = fwhm # Cut individual bright pixels. These are often cosmic rays sources = sources[fwhm > 1.0] # Update the catalog to match FITS convention instead of Python array convention sources['x'] += 1.0 sources['y'] += 1.0 sources['xpeak'] += 1 sources['ypeak'] += 1 sources['theta'] = np.degrees(sources['theta']) return save_catalog_meta_data(sources)
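Several of the catalogs above estimate the FWHM from the second-moment semi-axes as 2 * sqrt(ln 2 * (a^2 + b^2)), which for a circular Gaussian reduces to the familiar 2.355 * sigma. A one-function sketch of that estimate:

import numpy as np

def fwhm_from_moments(a, b):
    # FWHM estimate used in the catalogs above: for a Gaussian profile,
    # FWHM = 2 * sqrt(ln 2 * (a**2 + b**2)), with a and b the SEP semi-axes.
    return 2.0 * np.sqrt(np.log(2.0) * (np.asarray(a) ** 2 + np.asarray(b) ** 2))

# A circular Gaussian with sigma = 2 pixels has a = b = 2, so this returns
# 2 * sqrt(ln 2 * 8) ~= 4.71, i.e. the familiar 2.355 * sigma.
print(fwhm_from_moments(2.0, 2.0))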